From 525ab1f14d01d40e777924be4c8e28b61e7bf58e Mon Sep 17 00:00:00 2001 From: Alexey Lebedeff Date: Tue, 16 Feb 2016 13:28:31 +0300 Subject: [PATCH] Upgrade rabbitmq to 3.6.1 Per https://mirantis.jira.com/browse/PROD-3272 Change-Id: I072fd1608247465efb10926b93f67dabc29f689b --- debian/changelog | 6 + debian/control | 1 + .../detect-stuck-queue-on-declare.diff | 35 - .../fix-management-startup-after-split.diff | 20 - .../patches/fix-pmon-demonitor-function.diff | 19 - debian/patches/series | 3 - debian/rabbitmq-env.conf | 13 - debian/rabbitmq-server-wait | 22 - debian/rabbitmq-server.init | 18 +- debian/rabbitmq-server.install | 4 - debian/rabbitmq-server.links | 3 - debian/rabbitmq-server.manpages | 4 + debian/rabbitmq-server.service | 4 +- debian/rules | 57 +- rabbitmq-server/{codegen => }/CONTRIBUTING.md | 0 rabbitmq-server/LICENSE | 12 +- .../sockjs-erlang-git => }/LICENSE-APL2-Rebar | 0 rabbitmq-server/LICENSE-EPL-OTP | 286 + .../LICENSE-MIT-Mochiweb | 0 .../sockjs-erlang-git => }/LICENSE-MIT-SockJS | 0 rabbitmq-server/LICENSE-MPL2 | 373 + rabbitmq-server/Makefile | 788 +- rabbitmq-server/README | 2 +- rabbitmq-server/README.md | 47 + rabbitmq-server/build.config | 43 + rabbitmq-server/calculate-relative | 45 - rabbitmq-server/check_xref | 291 + rabbitmq-server/codegen/license_info | 4 - .../amqp_client}/CONTRIBUTING.md | 0 rabbitmq-server/deps/amqp_client/Makefile | 167 + .../amqp_client}/README.in | 0 rabbitmq-server/deps/amqp_client/build.config | 43 + rabbitmq-server/deps/amqp_client/erlang.mk | 6533 ++++++++++++++++ .../amqp_client}/include/amqp_client.hrl | 2 +- .../include/amqp_client_internal.hrl | 0 .../include/amqp_gen_consumer_spec.hrl | 2 +- .../include/rabbit_routing_prefixes.hrl | 0 .../deps/amqp_client/rabbitmq-components.mk | 331 + .../amqp_client}/src/amqp_auth_mechanisms.erl | 2 +- .../amqp_client}/src/amqp_channel.erl | 6 +- .../amqp_client}/src/amqp_channel_sup.erl | 2 +- .../amqp_client}/src/amqp_channel_sup_sup.erl | 2 +- 
.../src/amqp_channels_manager.erl | 2 +- .../amqp_client/src/amqp_client.app.src} | 4 +- .../amqp_client}/src/amqp_client.erl | 3 +- .../amqp_client}/src/amqp_connection.erl | 4 +- .../amqp_client}/src/amqp_connection_sup.erl | 2 +- .../src/amqp_connection_type_sup.erl | 2 +- .../src/amqp_direct_connection.erl | 12 +- .../amqp_client}/src/amqp_direct_consumer.erl | 0 .../amqp_client}/src/amqp_gen_connection.erl | 2 +- .../amqp_client}/src/amqp_gen_consumer.erl | 0 .../amqp_client}/src/amqp_main_reader.erl | 2 +- .../src/amqp_network_connection.erl | 12 +- .../amqp_client}/src/amqp_rpc_client.erl | 2 +- .../amqp_client}/src/amqp_rpc_server.erl | 2 +- .../src/amqp_selective_consumer.erl | 0 .../amqp_client}/src/amqp_sup.erl | 2 +- .../amqp_client}/src/amqp_uri.erl | 31 +- .../amqp_client}/src/overview.edoc.in | 0 .../amqp_client}/src/rabbit_routing_util.erl | 2 +- .../amqp_client}/src/uri_parser.erl | 0 rabbitmq-server/deps/amqp_client/test.mk | 173 + .../amqp_client}/test/Makefile | 0 .../amqp_client}/test/amqp_client_SUITE.erl | 4 +- .../amqp_client}/test/amqp_dbg.erl | 2 +- .../amqp_client}/test/negative_test_util.erl | 6 +- .../amqp_client}/test/test_util.erl | 63 +- rabbitmq-server/deps/cowboy/AUTHORS | 91 + rabbitmq-server/deps/cowboy/CHANGELOG.md | 910 +++ rabbitmq-server/deps/cowboy/CONTRIBUTING.md | 173 + .../cowboy-git => deps/cowboy}/LICENSE | 2 +- rabbitmq-server/deps/cowboy/Makefile | 75 + rabbitmq-server/deps/cowboy/README.md | 50 + rabbitmq-server/deps/cowboy/ROADMAP.md | 127 + rabbitmq-server/deps/cowboy/all.sh | 19 + rabbitmq-server/deps/cowboy/circle.yml | 22 + rabbitmq-server/deps/cowboy/erlang.mk | 1 + rabbitmq-server/deps/cowboy/rebar.config | 4 + .../deps/cowboy/src/cowboy.app.src | 9 + rabbitmq-server/deps/cowboy/src/cowboy.erl | 75 + .../cowboy/src/cowboy_app.erl} | 27 +- .../cowboy}/src/cowboy_bstr.erl | 61 +- .../cowboy}/src/cowboy_clock.erl | 106 +- .../deps/cowboy/src/cowboy_handler.erl | 304 + .../cowboy}/src/cowboy_http.erl | 572 +- 
.../deps/cowboy/src/cowboy_http_handler.erl | 37 + .../deps/cowboy/src/cowboy_loop_handler.erl | 40 + .../deps/cowboy/src/cowboy_middleware.erl | 25 + .../deps/cowboy/src/cowboy_protocol.erl | 510 ++ .../deps/cowboy/src/cowboy_req.erl | 1385 ++++ .../cowboy/src/cowboy_rest.erl} | 745 +- .../deps/cowboy/src/cowboy_router.erl | 572 ++ .../deps/cowboy/src/cowboy_spdy.erl | 505 ++ .../deps/cowboy/src/cowboy_static.erl | 291 + .../deps/cowboy/src/cowboy_sub_protocol.erl | 23 + .../cowboy}/src/cowboy_sup.erl | 18 +- .../deps/cowboy/src/cowboy_websocket.erl | 770 ++ .../cowboy/src/cowboy_websocket_handler.erl | 50 + .../cowboy/test}/cover.spec | 0 .../deps/cowboy/test/cowboy_ct_hook.erl | 23 + .../deps/cowboy/test/cowboy_error_h.erl | 145 + .../deps/cowboy/test/cowboy_test.erl | 200 + .../deps/cowboy/test/eunit_SUITE.erl | 22 + .../cowboy/test/handlers/input_crash_h.erl | 10 + .../cowboy/test/handlers/long_polling_h.erl | 27 + .../test/handlers/loop_handler_body_h.erl | 24 + .../test/handlers/loop_handler_timeout_h.erl | 23 + .../deps/cowboy/test/http_SUITE.erl | 1111 +++ .../test/http_SUITE_data/http_body_qs.erl | 40 + .../test/http_SUITE_data/http_chunked.erl | 19 + .../test/http_SUITE_data/http_echo_body.erl | 29 + .../test/http_SUITE_data/http_errors.erl} | 26 +- .../test/http_SUITE_data}/http_handler.erl | 6 +- .../http_SUITE_data/http_init_shutdown.erl | 17 + .../http_SUITE_data/http_loop_stream_recv.erl | 34 + .../test/http_SUITE_data/http_multipart.erl | 25 + .../http_SUITE_data/http_multipart_stream.erl | 34 + .../test/http_SUITE_data/http_req_attr.erl | 19 + .../test/http_SUITE_data/http_set_resp.erl | 31 + .../test/http_SUITE_data/http_stream_body.erl | 32 + .../test/http_SUITE_data/http_streamed.erl | 20 + .../http_SUITE_data/rest_empty_resource.erl | 5 + .../test/http_SUITE_data/rest_expires.erl | 22 + .../http_SUITE_data/rest_expires_binary.erl | 18 + .../rest_forbidden_resource.erl | 19 +- .../rest_missing_callbacks.erl | 23 + .../rest_nodelete_resource.erl 
| 17 + .../test/http_SUITE_data/rest_param_all.erl | 36 + .../http_SUITE_data/rest_patch_resource.erl | 34 + .../rest_post_charset_resource.erl | 15 + .../rest_postonly_resource.erl | 14 + .../http_SUITE_data/rest_resource_etags.erl | 32 + .../http_SUITE_data}/rest_simple_resource.erl | 2 +- .../deps/cowboy/test/loop_handler_SUITE.erl | 87 + .../deps/cowboy/test/spdy_SUITE.erl | 147 + rabbitmq-server/deps/cowboy/test/ws_SUITE.erl | 710 ++ .../cowboy/test/ws_SUITE_data/client.json | 14 + .../cowboy/test/ws_SUITE_data/ws_echo.erl | 27 + .../test/ws_SUITE_data/ws_echo_timer.erl} | 17 +- .../test/ws_SUITE_data/ws_init_shutdown.erl} | 17 +- .../test/ws_SUITE_data/ws_send_many.erl | 27 + .../test/ws_SUITE_data/ws_timeout_cancel.erl | 26 + .../ws_SUITE_data/ws_timeout_hibernate.erl} | 15 +- .../ws_SUITE_data/ws_upgrade_with_opts.erl | 28 + rabbitmq-server/deps/cowlib/AUTHORS | 4 + rabbitmq-server/deps/cowlib/CHANGELOG.md | 12 + rabbitmq-server/deps/cowlib/LICENSE | 13 + rabbitmq-server/deps/cowlib/Makefile | 43 + rabbitmq-server/deps/cowlib/README.md | 20 + rabbitmq-server/deps/cowlib/all.sh | 17 + rabbitmq-server/deps/cowlib/build.config | 20 + rabbitmq-server/deps/cowlib/erlang.mk | 1 + .../deps/cowlib/include/cow_inline.hrl | 388 + .../deps/cowlib/src/cow_cookie.erl | 267 + rabbitmq-server/deps/cowlib/src/cow_date.erl | 206 + rabbitmq-server/deps/cowlib/src/cow_http.erl | 301 + .../deps/cowlib/src/cow_http_hd.erl | 194 + .../deps/cowlib/src/cow_http_te.erl | 327 + .../deps/cowlib/src/cow_mimetypes.erl | 1041 +++ .../deps/cowlib/src/cow_mimetypes.erl.src | 59 + .../deps/cowlib/src/cow_multipart.erl | 752 ++ rabbitmq-server/deps/cowlib/src/cow_qs.erl | 571 ++ rabbitmq-server/deps/cowlib/src/cow_spdy.erl | 265 + rabbitmq-server/deps/cowlib/src/cow_spdy.hrl | 181 + .../deps/cowlib/src/cowlib.app.src | 7 + .../cowlib/test/eunit_SUITE.erl} | 26 +- .../licensing/LICENSE-APACHE2-ExplorerCanvas | 0 .../licensing/LICENSE-APL2-Rebar} | 0 .../licensing/LICENSE-APL2-Stomp-Websocket 
| 0 .../licensing/LICENSE-BSD-base64js | 0 .../licensing/LICENSE-BSD-glMatrix | 0 .../deps/licensing/LICENSE-EPL-OTP | 286 + .../licensing/LICENSE-MIT-EJS10 | 0 .../licensing/LICENSE-MIT-Flot | 0 .../deps/licensing/LICENSE-MIT-Mochiweb | 22 + .../licensing/LICENSE-MIT-Sammy060 | 0 .../licensing/LICENSE-MIT-SockJS} | 4 +- .../licensing/LICENSE-MIT-jQuery164 | 0 .../licensing}/LICENSE-MPL-RabbitMQ | 0 rabbitmq-server/deps/licensing/LICENSE-MPL2 | 373 + .../licensing/license_info_rabbitmq_codegen | 4 + .../license_info_rabbitmq_management} | 0 ...cense_info_rabbitmq_management_visualiser} | 0 rabbitmq-server/deps/mochiweb/CHANGES.md | 201 + .../mochiweb-git => deps/mochiweb}/LICENSE | 0 rabbitmq-server/deps/mochiweb/Makefile | 22 + .../deps/mochiweb/Makefile.orig.mk | 24 + .../mochiweb-git => deps/mochiweb}/README | 0 .../mochiweb}/examples/hmac_api/README | 0 .../mochiweb}/examples/hmac_api/hmac_api.hrl | 0 .../examples/hmac_api/hmac_api_client.erl | 0 .../examples/hmac_api/hmac_api_lib.erl | 0 .../mochiweb}/examples/https/https_store.erl | 0 .../mochiweb}/examples/https/server_cert.pem | 0 .../mochiweb}/examples/https/server_key.pem | 0 .../examples/keepalive/keepalive.erl | 0 .../mochiweb/examples/websocket/index.html | 59 + .../mochiweb/examples/websocket/websocket.erl | 148 + .../mochiweb}/include/internal.hrl | 0 rabbitmq-server/deps/mochiweb/rebar | Bin 0 -> 159782 bytes .../mochiweb}/rebar.config | 3 +- .../mochiweb}/scripts/entities.erl | 0 .../mochiweb}/src/mochifmt.erl | 18 + .../mochiweb}/src/mochifmt_records.erl | 18 + .../deps/mochiweb/src/mochifmt_std.erl | 51 + .../deps/mochiweb/src/mochiglobal.erl | 127 + .../mochiweb}/src/mochihex.erl | 18 + .../mochiweb}/src/mochijson.erl | 18 + .../deps/mochiweb/src/mochijson2.erl | 907 +++ .../mochiweb}/src/mochilists.erl | 18 + .../mochiweb}/src/mochilogfile2.erl | 18 + .../deps/mochiweb/src/mochinum.erl | 372 + .../mochiweb}/src/mochitemp.erl | 26 +- .../mochiweb}/src/mochiutf8.erl | 48 +- 
.../deps/mochiweb/src/mochiweb.app.src | 8 + .../deps/mochiweb/src/mochiweb.erl | 101 + .../deps/mochiweb/src/mochiweb_acceptor.erl | 83 + .../mochiweb}/src/mochiweb_base64url.erl | 26 +- .../mochiweb}/src/mochiweb_charref.erl | 20 +- .../deps/mochiweb/src/mochiweb_clock.erl | 101 + .../mochiweb}/src/mochiweb_cookies.erl | 18 + .../mochiweb}/src/mochiweb_cover.erl | 18 + .../deps/mochiweb/src/mochiweb_echo.erl | 59 + .../mochiweb}/src/mochiweb_headers.erl | 18 + .../mochiweb}/src/mochiweb_html.erl | 63 +- .../mochiweb}/src/mochiweb_http.erl | 147 +- .../mochiweb}/src/mochiweb_io.erl | 18 + .../mochiweb}/src/mochiweb_mime.erl | 18 + .../mochiweb}/src/mochiweb_multipart.erl | 38 +- .../mochiweb}/src/mochiweb_request.erl | 227 +- .../mochiweb}/src/mochiweb_response.erl | 18 + .../mochiweb}/src/mochiweb_session.erl | 86 +- .../deps/mochiweb/src/mochiweb_socket.erl | 148 + .../mochiweb}/src/mochiweb_socket_server.erl | 108 +- .../mochiweb}/src/mochiweb_util.erl | 37 +- .../deps/mochiweb/src/mochiweb_websocket.erl | 281 + .../mochiweb}/src/reloader.erl | 20 +- .../support/templates/mochiwebapp.template | 2 + .../templates/mochiwebapp_skel/bench.sh | 19 + .../mochiwebapp_skel/priv/www/index.html | 0 .../templates/mochiwebapp_skel/rebar.config | 0 .../mochiwebapp_skel/src/mochiapp.app.src | 0 .../mochiwebapp_skel/src/mochiapp.erl | 0 .../mochiwebapp_skel/src/mochiapp_app.erl | 0 .../mochiwebapp_skel/src/mochiapp_deps.erl | 0 .../mochiwebapp_skel/src/mochiapp_sup.erl | 0 .../mochiwebapp_skel/src/mochiapp_web.erl | 6 +- .../templates/mochiwebapp_skel/start-dev.sh | 7 + .../support/test-materials/test_ssl_cert.pem | 0 .../support/test-materials/test_ssl_key.pem | 0 .../test/mochiweb_base64url_tests.erl | 0 .../mochiweb}/test/mochiweb_html_tests.erl | 6 + .../mochiweb}/test/mochiweb_http_tests.erl | 0 .../mochiweb/test}/mochiweb_request_tests.erl | 0 .../test/mochiweb_socket_server_tests.erl | 149 + .../deps/mochiweb/test/mochiweb_test_util.erl | 134 + 
.../deps/mochiweb/test/mochiweb_test_util.hrl | 1 + .../deps/mochiweb/test/mochiweb_tests.erl | 220 + .../test/mochiweb_websocket_tests.erl | 160 + rabbitmq-server/deps/rabbit_common/LICENSE | 11 + .../rabbit_common}/LICENSE-MPL-RabbitMQ | 2 +- rabbitmq-server/deps/rabbit_common/Makefile | 71 + .../deps/rabbit_common/build.config | 43 + .../{ => deps/rabbit_common}/codegen.py | 223 +- rabbitmq-server/deps/rabbit_common/erlang.mk | 6617 ++++++++++++++++ .../rabbit_common}/include/rabbit.hrl | 46 +- .../include/rabbit_msg_store.hrl | 0 .../rabbit_common/mk/rabbitmq-components.mk | 331 + .../deps/rabbit_common/mk/rabbitmq-dist.mk | 159 + .../deps/rabbit_common/mk/rabbitmq-plugin.mk | 15 + .../deps/rabbit_common/mk/rabbitmq-run.mk | 280 + .../deps/rabbit_common/mk/rabbitmq-tests.mk | 86 + .../deps/rabbit_common/mk/rabbitmq-tools.mk | 63 + .../rabbit_common}/src/app_utils.erl | 2 +- .../rabbit_common}/src/credit_flow.erl | 34 +- .../rabbit_common}/src/gen_server2.erl | 15 +- .../src/mirrored_supervisor.erl | 8 +- .../rabbit_common}/src/mochijson2.erl | 0 .../{ => deps/rabbit_common}/src/pmon.erl | 2 +- .../rabbit_common}/src/priority_queue.erl | 2 +- .../rabbit_common}/src/rabbit_amqqueue.erl | 119 +- .../src/rabbit_auth_mechanism.erl | 2 +- .../src/rabbit_authn_backend.erl | 2 +- .../src/rabbit_authz_backend.erl | 2 +- .../src/rabbit_backing_queue.erl | 32 +- .../rabbit_common}/src/rabbit_basic.erl | 11 +- .../src/rabbit_binary_generator.erl | 31 +- .../src/rabbit_binary_parser.erl | 27 +- .../rabbit_common}/src/rabbit_channel.erl | 196 +- .../src/rabbit_channel_interceptor.erl | 117 + .../src/rabbit_command_assembler.erl | 2 +- .../rabbit_common/src/rabbit_common.app.src | 13 + .../rabbit_common/src/rabbit_control_misc.erl | 96 + .../src/rabbit_data_coercion.erl | 22 + .../rabbit_common}/src/rabbit_event.erl | 7 +- .../src/rabbit_exchange_decorator.erl | 2 +- .../src/rabbit_exchange_type.erl | 2 +- .../rabbit_common}/src/rabbit_heartbeat.erl | 2 +- 
.../rabbit_common}/src/rabbit_misc.erl | 126 +- .../src/rabbit_msg_store_index.erl | 2 +- .../rabbit_common}/src/rabbit_net.erl | 77 +- .../rabbit_common}/src/rabbit_networking.erl | 168 +- .../rabbit_common}/src/rabbit_nodes.erl | 10 +- .../src/rabbit_password_hashing.erl | 33 + .../src/rabbit_policy_validator.erl | 2 +- .../src/rabbit_queue_collector.erl | 5 +- .../src/rabbit_queue_decorator.erl | 4 +- .../src/rabbit_queue_master_locator.erl | 33 + .../rabbit_common}/src/rabbit_reader.erl | 355 +- .../src/rabbit_runtime_parameter.erl | 2 +- .../rabbit_common}/src/rabbit_writer.erl | 44 +- .../rabbit_common}/src/ssl_compat.erl | 2 +- .../rabbit_common}/src/supervisor2.erl | 17 +- .../rabbit_common}/src/time_compat.erl | 0 .../rabbitmq_amqp1_0}/CONTRIBUTING.md | 0 .../deps/rabbitmq_amqp1_0/Makefile | 61 + .../rabbitmq_amqp1_0}/README.md | 0 .../deps/rabbitmq_amqp1_0/build.config | 43 + .../rabbitmq_amqp1_0}/codegen.py | 55 +- .../deps/rabbitmq_amqp1_0/erlang.mk | 6589 ++++++++++++++++ .../include/rabbit_amqp1_0.hrl | 0 .../rabbitmq_amqp1_0/rabbitmq-components.mk | 331 + .../rabbitmq_amqp1_0}/spec/messaging.xml | 0 .../rabbitmq_amqp1_0}/spec/security.xml | 0 .../rabbitmq_amqp1_0}/spec/transactions.xml | 0 .../rabbitmq_amqp1_0}/spec/transport.xml | 0 .../rabbitmq_amqp1_0}/spec/types.xml | 0 .../src/rabbit_amqp1_0_binary_generator.erl | 28 +- .../src/rabbit_amqp1_0_binary_parser.erl | 2 +- .../src/rabbit_amqp1_0_channel.erl | 2 +- .../src/rabbit_amqp1_0_framing.erl | 2 +- .../src/rabbit_amqp1_0_incoming_link.erl | 4 +- .../src/rabbit_amqp1_0_link_util.erl | 2 +- .../src/rabbit_amqp1_0_message.erl | 2 +- .../src/rabbit_amqp1_0_outgoing_link.erl | 4 +- .../src/rabbit_amqp1_0_reader.erl | 2 +- .../src/rabbit_amqp1_0_session.erl | 2 +- .../src/rabbit_amqp1_0_session_process.erl | 2 +- .../src/rabbit_amqp1_0_session_sup.erl | 2 +- .../src/rabbit_amqp1_0_session_sup_sup.erl | 2 +- .../src/rabbit_amqp1_0_util.erl | 2 +- .../src/rabbit_amqp1_0_writer.erl | 2 +- 
.../src/rabbitmq_amqp1_0.app.src | 2 +- .../rabbitmq_amqp1_0}/test/lib-java/junit.jar | Bin .../rabbitmq_amqp1_0}/test/proton/Makefile | 0 .../rabbitmq_amqp1_0}/test/proton/build.xml | 0 .../amqp1_0/tests/proton/ProtonTests.java | 0 .../test/src/rabbit_amqp1_0_test.erl | 2 +- .../rabbitmq_amqp1_0}/test/swiftmq/Makefile | 6 +- .../rabbitmq_amqp1_0}/test/swiftmq/build.xml | 0 .../test/swiftmq/run-tests.sh | 2 + .../amqp1_0/tests/swiftmq/SwiftMQTests.java | 39 +- .../CONTRIBUTING.md | 0 .../deps/rabbitmq_auth_backend_ldap/Makefile | 29 + .../README-authorisation.md | 1 + .../README-tests.md} | 11 +- .../deps/rabbitmq_auth_backend_ldap/README.md | 19 + .../rabbitmq_auth_backend_ldap/build.config | 43 + .../deps/rabbitmq_auth_backend_ldap/erlang.mk | 6589 ++++++++++++++++ .../etc/rabbit-test.config | 0 .../example/README | 0 .../example/global.ldif | 0 .../example/groups.ldif | 16 + .../example/people.ldif | 18 + .../example/rabbit.ldif | 0 .../example/seed.sh} | 9 - .../example/setup.sh | 10 + .../rabbitmq-components.mk | 331 + .../src/rabbit_auth_backend_ldap.erl | 52 +- .../src/rabbit_auth_backend_ldap_app.erl | 14 +- .../src/rabbit_auth_backend_ldap_util.erl | 2 +- .../src/rabbitmq_auth_backend_ldap.app.src | 5 +- .../src/rabbit_auth_backend_ldap_test.erl | 2 +- .../rabbit_auth_backend_ldap_unit_test.erl | 2 +- .../CONTRIBUTING.md | 0 .../deps/rabbitmq_auth_mechanism_ssl/Makefile | 14 + .../rabbitmq_auth_mechanism_ssl/README.md} | 24 +- .../rabbitmq_auth_mechanism_ssl/erlang.mk | 6640 +++++++++++++++++ .../rabbitmq-components.mk | 331 + .../src/rabbit_auth_mechanism_ssl.erl | 2 +- .../src/rabbit_auth_mechanism_ssl_app.erl | 2 +- .../src/rabbitmq_auth_mechanism_ssl.app.src | 2 +- .../rabbitmq_codegen}/CONTRIBUTING.md | 0 .../rabbitmq_codegen}/LICENSE | 0 .../rabbitmq_codegen}/LICENSE-MPL-RabbitMQ | 0 .../rabbitmq_codegen}/Makefile | 2 +- .../rabbitmq_codegen}/README.extensions.md | 0 .../rabbitmq_codegen}/amqp-rabbitmq-0.8.json | 2 +- .../amqp-rabbitmq-0.9.1.json | 2 
+- .../rabbitmq_codegen}/amqp_codegen.py | 38 +- .../rabbitmq_codegen}/credit_extension.json | 2 +- .../rabbitmq_codegen}/demo_extension.json | 0 .../deps/rabbitmq_codegen/license_info | 4 + .../CONTRIBUTING.md | 0 .../LICENSE | 0 .../LICENSE-MPL-RabbitMQ | 2 +- .../Makefile | 21 + .../README.md | 122 +- .../erlang.mk | 6640 +++++++++++++++++ .../rabbitmq-components.mk | 331 + .../rabbit_exchange_type_consistent_hash.erl | 63 +- .../rabbitmq_consistent_hash_exchange.app.src | 2 +- ...bit_exchange_type_consistent_hash_test.erl | 84 +- .../deps/rabbitmq_event_exchange/LICENSE | 5 + .../LICENSE-MPL-RabbitMQ | 455 ++ .../deps/rabbitmq_event_exchange/Makefile | 21 + .../deps/rabbitmq_event_exchange/README.md | 136 + .../deps/rabbitmq_event_exchange/build.config | 43 + .../deps/rabbitmq_event_exchange/erlang.mk | 6589 ++++++++++++++++ .../examples/java/QueueEvents.java | 43 + .../rabbitmq-components.mk | 331 + .../src/rabbit_exchange_type_event.erl | 131 + .../src/rabbitmq_event_exchange.app.src | 7 + .../src/rabbit_exchange_type_event_test.erl | 52 + .../rabbit_exchange_type_event_test_all.erl | 29 + .../rabbit_exchange_type_event_unit_test.erl | 56 + .../rabbitmq_federation}/CONTRIBUTING.md | 0 .../deps/rabbitmq_federation/Makefile | 36 + .../rabbitmq_federation}/README-hacking | 0 .../rabbitmq_federation}/README.md | 0 .../deps/rabbitmq_federation/build.config | 43 + .../deps/rabbitmq_federation/erlang.mk | 6589 ++++++++++++++++ .../rabbitmq_federation}/etc/rabbit-test.sh | 0 .../etc/setup-rabbit-test.sh | 2 + .../include/rabbit_federation.hrl | 2 +- .../rabbitmq-components.mk | 331 + .../src/rabbit_federation_app.erl | 2 +- .../src/rabbit_federation_db.erl | 2 +- .../src/rabbit_federation_event.erl | 2 +- .../src/rabbit_federation_exchange.erl | 2 +- .../src/rabbit_federation_exchange_link.erl | 2 +- ...abbit_federation_exchange_link_sup_sup.erl | 2 +- .../src/rabbit_federation_link_sup.erl | 2 +- .../src/rabbit_federation_link_util.erl | 2 +- 
.../src/rabbit_federation_parameters.erl | 2 +- .../src/rabbit_federation_queue.erl | 2 +- .../src/rabbit_federation_queue_link.erl | 2 +- .../rabbit_federation_queue_link_sup_sup.erl | 2 +- .../src/rabbit_federation_status.erl | 2 +- .../src/rabbit_federation_sup.erl | 2 +- .../src/rabbit_federation_upstream.erl | 6 +- .../rabbit_federation_upstream_exchange.erl | 2 +- .../src/rabbit_federation_util.erl | 2 +- .../src/rabbitmq_federation.app.src | 2 +- .../src/rabbit_federation_exchange_test.erl | 4 +- .../test/src/rabbit_federation_queue_test.erl | 2 +- .../test/src/rabbit_federation_test_util.erl | 7 +- .../test/src/rabbit_federation_unit_test.erl | 2 +- .../CONTRIBUTING.md | 0 .../rabbitmq_federation_management/LICENSE | 14 + .../LICENSE-APACHE2-ExplorerCanvas | 0 .../LICENSE-BSD-base64js | 0 .../LICENSE-MIT-EJS10 | 0 .../LICENSE-MIT-Flot | 0 .../LICENSE-MIT-Sammy060 | 0 .../LICENSE-MIT-jQuery164 | 0 .../LICENSE-MPL-RabbitMQ | 455 ++ .../rabbitmq_federation_management/Makefile | 14 + .../rabbitmq_federation_management/README.md | 38 + .../rabbitmq_federation_management/erlang.mk | 6640 +++++++++++++++++ .../priv/www/js/federation.js | 30 +- .../priv/www/js/tmpl/federation-upstream.ejs | 61 +- .../priv/www/js/tmpl/federation-upstreams.ejs | 133 +- .../priv/www/js/tmpl/federation.ejs | 0 .../rabbitmq-components.mk | 331 + .../src/rabbit_federation_mgmt.erl | 2 +- .../rabbitmq_federation_management.app.src | 2 +- .../rabbitmq_management}/CONTRIBUTING.md | 0 .../rabbitmq_management}/LICENSE | 0 .../LICENSE-APACHE2-ExplorerCanvas} | 24 + .../rabbitmq_management/LICENSE-BSD-base64js | 25 + .../rabbitmq_management/LICENSE-MIT-EJS10} | 6 +- .../deps/rabbitmq_management/LICENSE-MIT-Flot | 22 + .../rabbitmq_management/LICENSE-MIT-Sammy060 | 25 + .../rabbitmq_management/LICENSE-MIT-jQuery164 | 21 + .../rabbitmq_management/LICENSE-MPL-RabbitMQ | 455 ++ .../deps/rabbitmq_management/Makefile | 52 + .../deps/rabbitmq_management/README.md | 13 + 
.../rabbitmq_management}/bin/rabbitmqadmin | 98 +- .../deps/rabbitmq_management/build.config | 43 + .../deps/rabbitmq_management/erlang.mk | 6589 ++++++++++++++++ .../rabbitmq_management}/etc/bunny.config | 0 .../rabbitmq_management}/etc/hare.config | 0 .../etc/rabbit-test.config | 0 .../include/rabbit_mgmt.hrl | 6 +- .../include/rabbit_mgmt_test.hrl | 0 .../rabbitmq_management}/license_info | 0 .../priv/www/api/index.html | 54 + .../priv/www/cli/index.html | 0 .../priv/www/css/evil.css | 0 .../priv/www/css/main.css | 3 + .../priv/www/doc/stats.html | 0 .../rabbitmq_management}/priv/www/favicon.ico | Bin .../priv/www/img/bg-binary.png | Bin .../priv/www/img/bg-green-dark.png | Bin .../priv/www/img/bg-red-dark.png | Bin .../priv/www/img/bg-red.png | Bin .../priv/www/img/bg-yellow-dark.png | Bin .../priv/www/img/collapse.png | Bin .../priv/www/img/expand.png | Bin .../priv/www/img/rabbitmqlogo.png | Bin .../rabbitmq_management}/priv/www/index.html | 3 +- .../priv/www/js/base64.js | 0 .../priv/www/js/charts.js | 0 .../priv/www/js/dispatcher.js | 35 +- .../rabbitmq_management}/priv/www/js/ejs.js | 0 .../priv/www/js/ejs.min.js | 0 .../priv/www/js/excanvas.js | 0 .../priv/www/js/excanvas.min.js | 0 .../priv/www/js/formatters.js | 119 +- .../priv/www/js/global.js | 5 + .../rabbitmq_management}/priv/www/js/help.js | 6 + .../priv/www/js/jquery-1.6.4.js | 0 .../priv/www/js/jquery-1.6.4.min.js | 0 .../priv/www/js/jquery.flot.js | 0 .../priv/www/js/jquery.flot.min.js | 0 .../priv/www/js/jquery.flot.time.js | 0 .../priv/www/js/jquery.flot.time.min.js | 0 .../rabbitmq_management}/priv/www/js/json2.js | 0 .../rabbitmq_management}/priv/www/js/main.js | 205 +- .../rabbitmq_management}/priv/www/js/prefs.js | 0 .../rabbitmq_management/priv/www/js/sammy.js} | 930 ++- .../priv/www/js/sammy.min.js | 5 + .../priv/www/js/tmpl/404.ejs | 0 .../priv/www/js/tmpl/add-binding.ejs | 0 .../priv/www/js/tmpl/binary.ejs | 0 .../priv/www/js/tmpl/bindings.ejs | 0 .../priv/www/js/tmpl/channel.ejs | 0 
.../priv/www/js/tmpl/channels-list.ejs | 0 .../priv/www/js/tmpl/channels.ejs | 7 + .../priv/www/js/tmpl/cluster-name.ejs | 0 .../priv/www/js/tmpl/columns-options.ejs | 0 .../priv/www/js/tmpl/connection.ejs | 0 .../priv/www/js/tmpl/connections.ejs | 10 +- .../priv/www/js/tmpl/consumers.ejs | 0 .../priv/www/js/tmpl/error-popup.ejs | 0 .../priv/www/js/tmpl/exchange.ejs | 2 +- .../priv/www/js/tmpl/exchanges.ejs | 13 +- .../priv/www/js/tmpl/import-succeeded.ejs | 0 .../priv/www/js/tmpl/layout.ejs | 0 .../priv/www/js/tmpl/login.ejs | 0 .../priv/www/js/tmpl/memory-bar.ejs | 0 .../priv/www/js/tmpl/memory-table.ejs | 0 .../priv/www/js/tmpl/memory.ejs | 0 .../priv/www/js/tmpl/messages.ejs | 0 .../www/js/tmpl/msg-detail-deliveries.ejs | 0 .../priv/www/js/tmpl/msg-detail-publishes.ejs | 0 .../priv/www/js/tmpl/node.ejs | 0 .../priv/www/js/tmpl/overview.ejs | 35 +- .../priv/www/js/tmpl/partition.ejs | 0 .../priv/www/js/tmpl/paths.ejs | 0 .../priv/www/js/tmpl/permissions.ejs | 0 .../priv/www/js/tmpl/policies.ejs | 0 .../priv/www/js/tmpl/policy.ejs | 0 .../priv/www/js/tmpl/publish.ejs | 0 .../priv/www/js/tmpl/queue.ejs | 0 .../priv/www/js/tmpl/queues.ejs | 13 +- .../priv/www/js/tmpl/rate-options.ejs | 0 .../priv/www/js/tmpl/registry.ejs | 0 .../priv/www/js/tmpl/status.ejs | 0 .../priv/www/js/tmpl/user.ejs | 0 .../priv/www/js/tmpl/users.ejs | 0 .../priv/www/js/tmpl/vhost.ejs | 0 .../priv/www/js/tmpl/vhosts.ejs | 0 .../rabbitmq-components.mk | 331 + .../src/rabbit_mgmt_app.erl | 2 +- .../src/rabbit_mgmt_db.erl | 14 +- .../src/rabbit_mgmt_dispatcher.erl | 7 +- .../src/rabbit_mgmt_extension.erl | 0 .../src/rabbit_mgmt_format.erl | 53 +- .../src/rabbit_mgmt_load_definitions.erl | 2 +- .../src/rabbit_mgmt_reset_handler.erl | 2 +- .../src/rabbit_mgmt_stats.erl | 2 +- .../src/rabbit_mgmt_sup.erl | 0 .../src/rabbit_mgmt_sup_sup.erl | 9 +- .../src/rabbit_mgmt_util.erl | 230 +- .../src/rabbit_mgmt_wm_aliveness_test.erl | 2 +- .../src/rabbit_mgmt_wm_binding.erl | 2 +- 
.../src/rabbit_mgmt_wm_bindings.erl | 2 +- .../src/rabbit_mgmt_wm_channel.erl | 9 +- .../src/rabbit_mgmt_wm_channels.erl | 10 +- .../src/rabbit_mgmt_wm_channels_vhost.erl | 54 + .../src/rabbit_mgmt_wm_cluster_name.erl | 2 +- .../src/rabbit_mgmt_wm_connection.erl | 9 +- .../rabbit_mgmt_wm_connection_channels.erl | 11 +- .../src/rabbit_mgmt_wm_connections.erl | 10 +- .../src/rabbit_mgmt_wm_connections_vhost.erl | 54 + .../src/rabbit_mgmt_wm_consumers.erl | 2 +- .../src/rabbit_mgmt_wm_definitions.erl | 135 +- .../src/rabbit_mgmt_wm_exchange.erl | 13 +- .../src/rabbit_mgmt_wm_exchange_publish.erl | 0 .../src/rabbit_mgmt_wm_exchanges.erl | 10 +- .../src/rabbit_mgmt_wm_extensions.erl | 0 .../src/rabbit_mgmt_wm_node.erl | 2 +- .../src/rabbit_mgmt_wm_nodes.erl | 9 +- .../src/rabbit_mgmt_wm_overview.erl | 41 +- .../src/rabbit_mgmt_wm_parameter.erl | 2 +- .../src/rabbit_mgmt_wm_parameters.erl | 2 +- .../src/rabbit_mgmt_wm_permission.erl | 2 +- .../src/rabbit_mgmt_wm_permissions.erl | 2 +- .../src/rabbit_mgmt_wm_permissions_user.erl | 2 +- .../src/rabbit_mgmt_wm_permissions_vhost.erl | 2 +- .../src/rabbit_mgmt_wm_policies.erl | 2 +- .../src/rabbit_mgmt_wm_policy.erl | 2 +- .../src/rabbit_mgmt_wm_queue.erl | 13 +- .../src/rabbit_mgmt_wm_queue_actions.erl | 0 .../src/rabbit_mgmt_wm_queue_get.erl | 6 +- .../src/rabbit_mgmt_wm_queue_purge.erl | 2 +- .../src/rabbit_mgmt_wm_queues.erl | 11 +- .../src/rabbit_mgmt_wm_user.erl | 61 +- .../src/rabbit_mgmt_wm_users.erl | 2 +- .../src/rabbit_mgmt_wm_vhost.erl | 15 +- .../src/rabbit_mgmt_wm_vhosts.erl | 10 +- .../src/rabbit_mgmt_wm_whoami.erl | 2 +- .../src/rabbitmq_management.app.src | 2 +- .../test/src/default-config | 0 .../test/src/rabbit_mgmt_test_clustering.erl | 0 .../test/src/rabbit_mgmt_test_db.erl | 6 +- .../test/src/rabbit_mgmt_test_db_unit.erl | 0 .../test/src/rabbit_mgmt_test_http.erl | 799 +- .../test/src/rabbit_mgmt_test_unit.erl | 2 +- .../test/src/rabbit_mgmt_test_util.erl | 0 .../test/src/rabbitmqadmin-test-wrapper.sh | 
0 .../test/src/rabbitmqadmin-test.py | 8 +- .../rabbitmq_management}/test/src/test-config | 0 .../CONTRIBUTING.md | 0 .../deps/rabbitmq_management_agent/LICENSE | 5 + .../LICENSE-MPL-RabbitMQ | 455 ++ .../deps/rabbitmq_management_agent/Makefile | 14 + .../deps/rabbitmq_management_agent/erlang.mk | 6640 +++++++++++++++++ .../rabbitmq-components.mk | 331 + .../src/rabbit_mgmt_agent_app.erl | 2 +- .../src/rabbit_mgmt_agent_sup.erl | 2 +- .../src/rabbit_mgmt_db_handler.erl | 2 +- .../src/rabbit_mgmt_external_stats.erl | 2 +- .../src/rabbitmq_management_agent.app.src | 2 +- .../CONTRIBUTING.md | 0 .../rabbitmq_management_visualiser}/LICENSE | 0 .../LICENSE-BSD-glMatrix | 0 .../LICENSE-MPL-RabbitMQ | 2 +- .../rabbitmq_management_visualiser/Makefile | 14 + .../rabbitmq_management_visualiser}/README | 0 .../rabbitmq_management_visualiser/erlang.mk | 6640 +++++++++++++++++ .../license_info | 0 .../priv/www/js/visualiser.js | 0 .../priv/www/visualiser/index.html | 0 .../priv/www/visualiser/js/glMatrix-min.js | 0 .../priv/www/visualiser/js/glMatrix.js | 0 .../priv/www/visualiser/js/main.js | 0 .../priv/www/visualiser/js/model.js | 0 .../priv/www/visualiser/js/octtree.js | 0 .../priv/www/visualiser/js/physics.js | 0 .../rabbitmq-components.mk | 331 + .../src/rabbit_mgmt_wm_all.erl | 2 +- .../src/rabbit_visualiser_mgmt.erl | 2 +- .../rabbitmq_management_visualiser.app.src | 2 +- .../rabbitmq_mqtt}/CONTRIBUTING.md | 0 rabbitmq-server/deps/rabbitmq_mqtt/Makefile | 38 + rabbitmq-server/deps/rabbitmq_mqtt/README.md | 38 + .../deps/rabbitmq_mqtt/build.config | 43 + rabbitmq-server/deps/rabbitmq_mqtt/erlang.mk | 6589 ++++++++++++++++ .../rabbitmq_mqtt}/include/rabbit_mqtt.hrl | 19 +- .../include/rabbit_mqtt_frame.hrl | 18 +- .../rabbit_mqtt_retained_msg_store.hrl | 16 + .../deps/rabbitmq_mqtt/rabbitmq-components.mk | 331 + .../rabbitmq_mqtt}/src/rabbit_mqtt.erl | 10 +- .../src/rabbit_mqtt_collector.erl | 2 +- .../src/rabbit_mqtt_connection_sup.erl | 21 +- 
.../rabbitmq_mqtt}/src/rabbit_mqtt_frame.erl | 2 +- .../src/rabbit_mqtt_processor.erl | 307 +- .../rabbitmq_mqtt}/src/rabbit_mqtt_reader.erl | 107 +- .../src/rabbit_mqtt_retained_msg_store.erl | 32 + .../rabbit_mqtt_retained_msg_store_dets.erl | 63 + .../rabbit_mqtt_retained_msg_store_ets.erl | 63 + .../src/rabbit_mqtt_retainer.erl | 111 + .../src/rabbit_mqtt_retainer_sup.erl | 62 + .../rabbitmq_mqtt}/src/rabbit_mqtt_sup.erl | 53 +- .../rabbitmq_mqtt}/src/rabbit_mqtt_util.erl | 31 +- .../src/rabbit_mqtt_vhost_event_handler.erl | 49 + .../rabbitmq_mqtt}/src/rabbitmq_mqtt.app.src | 15 +- .../rabbitmq_mqtt}/test/Makefile | 6 +- .../rabbitmq_mqtt}/test/build.properties | 0 .../rabbitmq_mqtt}/test/build.xml | 4 +- .../deps/rabbitmq_mqtt/test/lib/junit.jar | Bin 0 -> 314932 bytes .../rabbitmq_mqtt}/test/rabbit-test.sh | 0 .../rabbitmq_mqtt/test/setup-rabbit-test.sh | 2 + .../src/com/rabbitmq/mqtt/test/MqttTest.java | 167 +- .../src/com/rabbitmq/mqtt/test/rabbit-test.sh | 0 .../rabbitmq/mqtt/test/setup-rabbit-test.sh | 2 + .../rabbitmq/mqtt/test/tls/MqttSSLTest.java | 2 +- .../rabbitmq/mqtt/test/tls/MutualAuth.java | 0 .../test/src/rabbit_mqtt_processor_tests.erl | 21 + .../test/src/rabbit_mqtt_util_tests.erl | 2 +- .../test/src/rabbitmq_mqtt_standalone.app.src | 5 +- .../rabbitmq_mqtt}/test/src/test.config | 0 .../rabbitmq_mqtt}/test/test.sh | 0 .../LICENSE.md} | 8 +- .../rabbitmq_recent_history_exchange/Makefile | 23 + .../README.md | 68 + .../build.config | 43 + .../erlang.mk | 6589 ++++++++++++++++ .../etc/rabbit-hare.config | 3 + .../etc/rabbit-test.config | 3 + .../include/rabbit_recent_history.hrl | 19 + .../rabbitmq-components.mk | 331 + .../rabbit_exchange_type_recent_history.erl | 212 + .../rabbitmq_recent_history_exchange.app.src | 6 + ...bbit_exchange_type_recent_history_test.erl | 234 + ...exchange_type_recent_history_test_util.erl | 79 + .../deps/rabbitmq_sharding/LICENSE | 7 + .../rabbitmq_sharding/LICENSE-MPL-RabbitMQ | 467 ++ 
.../deps/rabbitmq_sharding/LICENSE-MPL2 | 373 + .../deps/rabbitmq_sharding/Makefile | 23 + .../deps/rabbitmq_sharding/README.extra.md | 79 + .../deps/rabbitmq_sharding/README.md | 159 + .../deps/rabbitmq_sharding/build.config | 43 + .../rabbitmq_sharding/docs/sharded_queues.png | Bin 0 -> 41926 bytes .../deps/rabbitmq_sharding/erlang.mk | 6589 ++++++++++++++++ .../rabbitmq_sharding/etc/rabbit-hare.config | 3 + .../rabbitmq_sharding/etc/rabbit-test.config | 3 + .../deps/rabbitmq_sharding/etc/rkey.sh | 8 + .../deps/rabbitmq_sharding/other_plugins | 1 + .../rabbitmq_sharding/rabbitmq-components.mk | 331 + .../rabbit_sharding_exchange_decorator.erl | 98 + ...it_sharding_exchange_type_modulus_hash.erl | 65 + .../src/rabbit_sharding_interceptor.erl | 179 + .../src/rabbit_sharding_policy_validator.erl | 70 + .../src/rabbit_sharding_shard.erl | 139 + .../src/rabbit_sharding_util.erl | 57 + .../src/rabbitmq_sharding.app.src | 6 + .../test/src/rabbit_hash_exchange_test.erl | 92 + .../test/src/rabbit_sharding_test.erl | 280 + .../test/src/rabbit_sharding_test_all.erl | 29 + .../test/src/rabbit_sharding_test_util.erl | 90 + .../rabbitmq_shovel}/CONTRIBUTING.md | 0 rabbitmq-server/deps/rabbitmq_shovel/Makefile | 16 + .../deps/rabbitmq_shovel/README.md | 22 + .../deps/rabbitmq_shovel/build.config | 43 + .../deps/rabbitmq_shovel/erlang.mk | 6589 ++++++++++++++++ .../include/rabbit_shovel.hrl | 2 +- .../rabbitmq_shovel/rabbitmq-components.mk | 331 + .../rabbitmq_shovel}/src/rabbit_shovel.erl | 2 +- .../src/rabbit_shovel_config.erl | 2 +- .../src/rabbit_shovel_dyn_worker_sup.erl | 2 +- .../src/rabbit_shovel_dyn_worker_sup_sup.erl | 2 +- .../src/rabbit_shovel_parameters.erl | 2 +- .../src/rabbit_shovel_status.erl | 2 +- .../src/rabbit_shovel_sup.erl | 2 +- .../src/rabbit_shovel_util.erl | 2 +- .../src/rabbit_shovel_worker.erl | 6 +- .../src/rabbit_shovel_worker_sup.erl | 2 +- .../src/rabbitmq_shovel.app.src | 2 +- .../test/src/rabbit_shovel_test.erl | 2 +- 
.../test/src/rabbit_shovel_test_all.erl | 0 .../test/src/rabbit_shovel_test_dyn.erl | 2 +- .../CONTRIBUTING.md | 0 .../deps/rabbitmq_shovel_management/Makefile | 18 + .../rabbitmq_shovel_management}/README | 0 .../rabbitmq_shovel_management/build.config | 43 + .../deps/rabbitmq_shovel_management/erlang.mk | 6589 ++++++++++++++++ .../etc/rabbit-test.config | 0 .../priv/www/js/shovel.js | 0 .../priv/www/js/tmpl/dynamic-shovel.ejs | 0 .../priv/www/js/tmpl/dynamic-shovels.ejs | 0 .../priv/www/js/tmpl/shovels.ejs | 0 .../rabbitmq-components.mk | 331 + .../src/rabbit_shovel_mgmt.erl | 2 +- .../src/rabbitmq_shovel_management.app.src | 2 +- .../test/src/rabbit_shovel_mgmt_test_all.erl | 0 .../test/src/rabbit_shovel_mgmt_test_http.erl | 0 .../rabbitmq_stomp}/CONTRIBUTING.md | 0 rabbitmq-server/deps/rabbitmq_stomp/Makefile | 45 + .../rabbitmq_stomp}/NOTES | 0 .../rabbitmq_stomp}/README.md | 4 + .../deps/rabbitmq_stomp/build.config | 43 + rabbitmq-server/deps/rabbitmq_stomp/erlang.mk | 6589 ++++++++++++++++ .../examples/perl/rabbitmq_stomp_recv.pl | 0 .../perl/rabbitmq_stomp_rpc_client.pl | 0 .../perl/rabbitmq_stomp_rpc_service.pl | 0 .../examples/perl/rabbitmq_stomp_send.pl | 0 .../examples/perl/rabbitmq_stomp_send_many.pl | 0 .../examples/perl/rabbitmq_stomp_slow_recv.pl | 0 .../examples/ruby/cb-receiver.rb | 0 .../examples/ruby/cb-sender.rb | 0 .../examples/ruby/cb-slow-receiver.rb | 0 .../examples/ruby/persistent-receiver.rb | 0 .../examples/ruby/persistent-sender.rb | 0 .../examples/ruby/topic-broadcast-receiver.rb | 0 .../ruby/topic-broadcast-with-unsubscribe.rb | 0 .../examples/ruby/topic-sender.rb | 0 .../rabbitmq_stomp}/include/rabbit_stomp.hrl | 2 +- .../include/rabbit_stomp_frame.hrl | 2 +- .../include/rabbit_stomp_headers.hrl | 11 + .../rabbitmq_stomp/rabbitmq-components.mk | 331 + .../rabbitmq_stomp}/src/rabbit_stomp.erl | 2 +- .../src/rabbit_stomp_client_sup.erl | 20 +- .../src/rabbit_stomp_frame.erl | 2 +- .../src/rabbit_stomp_processor.erl | 394 +- 
.../src/rabbit_stomp_reader.erl | 333 + .../rabbitmq_stomp}/src/rabbit_stomp_sup.erl | 51 +- .../rabbitmq_stomp}/src/rabbit_stomp_util.erl | 81 +- .../src/rabbitmq_stomp.app.src | 9 +- .../rabbitmq_stomp}/test/src/ack.py | 6 +- .../rabbitmq_stomp}/test/src/base.py | 11 +- .../test/src/connect_options.py | 4 +- .../rabbitmq_stomp}/test/src/destinations.py | 25 +- .../rabbitmq_stomp}/test/src/errors.py | 0 .../rabbitmq_stomp}/test/src/lifecycle.py | 0 .../rabbitmq_stomp}/test/src/parsing.py | 6 +- .../test/src/queue_properties.py | 0 .../test/src/rabbit_stomp_amqqueue_test.erl | 2 +- .../test/src/rabbit_stomp_client.erl | 6 +- .../test/src/rabbit_stomp_publish_test.erl | 23 +- .../test/src/rabbit_stomp_test.erl | 25 +- .../test/src/rabbit_stomp_test_frame.erl | 2 +- .../test/src/rabbit_stomp_test_util.erl | 2 +- .../rabbitmq_stomp}/test/src/redelivered.py | 0 .../rabbitmq_stomp}/test/src/reliability.py | 0 .../rabbitmq_stomp}/test/src/ssl_lifecycle.py | 10 +- .../rabbitmq_stomp}/test/src/test.config | 3 +- .../rabbitmq_stomp}/test/src/test.py | 1 + .../test/src/test_connect_options.py | 0 .../rabbitmq_stomp}/test/src/test_runner.py | 7 - .../rabbitmq_stomp}/test/src/test_ssl.py | 0 .../rabbitmq_stomp}/test/src/test_util.py | 4 +- .../rabbitmq_stomp}/test/src/transactions.py | 0 .../rabbitmq_stomp/test/src/x_queue_name.py | 63 + .../rabbitmq_tracing}/CONTRIBUTING.md | 0 .../deps/rabbitmq_tracing/Makefile | 21 + .../deps/rabbitmq_tracing/README.md | 56 + .../deps/rabbitmq_tracing/build.config | 43 + .../deps/rabbitmq_tracing/erlang.mk | 6589 ++++++++++++++++ .../priv/www/js/tmpl/traces.ejs | 0 .../rabbitmq_tracing}/priv/www/js/tracing.js | 0 .../rabbitmq_tracing/rabbitmq-components.mk | 331 + .../src/rabbit_tracing_app.erl | 2 +- .../src/rabbit_tracing_consumer.erl | 16 +- .../src/rabbit_tracing_consumer_sup.erl | 2 +- .../src/rabbit_tracing_files.erl | 2 +- .../src/rabbit_tracing_mgmt.erl | 2 +- .../src/rabbit_tracing_sup.erl | 2 +- .../src/rabbit_tracing_traces.erl | 
2 +- .../src/rabbit_tracing_util.erl | 7 + .../src/rabbit_tracing_wm_file.erl | 2 +- .../src/rabbit_tracing_wm_files.erl | 2 +- .../src/rabbit_tracing_wm_trace.erl | 2 +- .../src/rabbit_tracing_wm_traces.erl | 2 +- .../src/rabbitmq_tracing.app.src | 6 +- .../test/src/rabbit_tracing_test.erl | 24 +- .../rabbitmq_web_dispatch}/CONTRIBUTING.md | 0 .../rabbitmq_web_dispatch}/LICENSE | 0 .../deps/rabbitmq_web_dispatch/Makefile | 18 + .../rabbitmq_web_dispatch}/README.md | 0 .../deps/rabbitmq_web_dispatch/build.config | 43 + .../deps/rabbitmq_web_dispatch/erlang.mk | 6589 ++++++++++++++++ .../rabbitmq-components.mk | 331 + .../src/rabbit_web_dispatch.erl | 6 +- .../src/rabbit_web_dispatch_app.erl | 2 +- .../src/rabbit_web_dispatch_registry.erl | 2 +- .../src/rabbit_web_dispatch_sup.erl | 2 +- .../src/rabbit_web_dispatch_util.erl | 4 +- .../src/rabbit_webmachine.erl | 0 .../src/rabbit_webmachine_error_handler.erl | 4 +- .../src/rabbitmq_web_dispatch.app.src | 2 +- .../test/priv/www/index.html | 0 .../test/src/rabbit_web_dispatch_test.erl | 4 +- .../src/rabbit_web_dispatch_test_unit.erl | 2 +- .../rabbitmq_web_stomp}/CONTRIBUTING.md | 0 .../rabbitmq_web_stomp}/LICENSE | 0 .../rabbitmq_web_stomp}/LICENSE-MPL-RabbitMQ | 0 .../deps/rabbitmq_web_stomp/Makefile | 48 + .../rabbitmq_web_stomp}/README.md | 2 +- .../deps/rabbitmq_web_stomp/build.config | 43 + .../deps/rabbitmq_web_stomp/erlang.mk | 6589 ++++++++++++++++ .../rabbitmq_web_stomp/rabbitmq-components.mk | 331 + .../rabbitmq_web_stomp}/src/rabbit_ws_app.erl | 2 +- .../src/rabbit_ws_client.erl | 199 + .../src/rabbit_ws_client_sup.erl | 40 + .../src/rabbit_ws_handler.erl | 90 + .../src/rabbit_ws_sockjs.erl | 51 +- .../rabbitmq_web_stomp}/src/rabbit_ws_sup.erl | 2 +- .../src/rabbitmq_web_stomp.app.src | 10 +- .../test/src/rabbit_ws_test_all.erl | 3 +- .../src/rabbit_ws_test_cowboy_websocket.erl | 115 + .../test/src/rabbit_ws_test_raw_websocket.erl | 4 +- .../src/rabbit_ws_test_sockjs_websocket.erl | 2 +- 
.../test/src/rfc6455_client.erl | 17 +- .../rabbitmq_web_stomp}/test/src/stomp.erl | 2 +- .../CONTRIBUTING.md | 0 .../rabbitmq_web_stomp_examples}/LICENSE | 0 .../LICENSE-APL2-Stomp-Websocket | 0 .../LICENSE-MPL-RabbitMQ | 0 .../deps/rabbitmq_web_stomp_examples/Makefile | 14 + .../rabbitmq_web_stomp_examples}/README.md | 0 .../rabbitmq_web_stomp_examples/erlang.mk | 6640 +++++++++++++++++ .../priv/bunny.html | 12 +- .../priv/bunny.png | Bin .../priv/echo.html | 11 +- .../priv/index.html | 7 + .../priv/main.css | 0 .../priv/pencil.cur | Bin .../rabbitmq_web_stomp_examples/priv/stomp.js | 501 ++ .../priv/temp-queue.html | 11 +- .../rabbitmq-components.mk | 331 + .../src/rabbit_web_stomp_examples_app.erl | 2 +- .../src/rabbitmq_web_stomp_examples.app.src | 2 +- rabbitmq-server/deps/ranch/AUTHORS | 33 + rabbitmq-server/deps/ranch/CHANGELOG.asciidoc | 29 + rabbitmq-server/deps/ranch/LICENSE | 13 + rabbitmq-server/deps/ranch/Makefile | 40 + rabbitmq-server/deps/ranch/README.asciidoc | 30 + rabbitmq-server/deps/ranch/appveyor.yml | 7 + rabbitmq-server/deps/ranch/circle.yml | 22 + .../deps/ranch/doc/src/guide/book.asciidoc | 20 + .../ranch/doc/src/guide/embedded.asciidoc | 48 + .../ranch/doc/src/guide/internals.asciidoc | 94 + .../ranch/doc/src/guide/introduction.asciidoc | 25 + .../ranch/doc/src/guide/listeners.asciidoc | 251 + .../deps/ranch/doc/src/guide/parsers.asciidoc | 92 + .../ranch/doc/src/guide/protocols.asciidoc | 125 + .../ranch/doc/src/guide/ssl_auth.asciidoc | 120 + .../ranch/doc/src/guide/transports.asciidoc | 169 + .../deps/ranch/doc/src/manual/ranch.asciidoc | 178 + .../ranch/doc/src/manual/ranch_app.asciidoc | 27 + .../doc/src/manual/ranch_protocol.asciidoc | 44 + .../ranch/doc/src/manual/ranch_ssl.asciidoc | 142 + .../ranch/doc/src/manual/ranch_tcp.asciidoc | 123 + .../doc/src/manual/ranch_transport.asciidoc | 194 + rabbitmq-server/deps/ranch/erlang.mk | 1 + .../deps/ranch/examples/tcp_echo/Makefile | 3 + .../deps/ranch/examples/tcp_echo/README.md | 27 + 
.../deps/ranch/examples/tcp_echo/relx.config | 2 + .../examples/tcp_echo/src/echo_protocol.erl | 24 + .../examples/tcp_echo/src/tcp_echo.app.src | 15 + .../examples/tcp_echo/src/tcp_echo_app.erl | 19 + .../examples/tcp_echo/src/tcp_echo_sup.erl | 22 + .../deps/ranch/examples/tcp_reverse/Makefile | 3 + .../deps/ranch/examples/tcp_reverse/README.md | 33 + .../ranch/examples/tcp_reverse/relx.config | 2 + .../tcp_reverse/src/reverse_protocol.erl | 73 + .../tcp_reverse/src/tcp_reverse.app.src | 15 + .../tcp_reverse/src/tcp_reverse_app.erl | 19 + .../tcp_reverse/src/tcp_reverse_sup.erl | 22 + rabbitmq-server/deps/ranch/src/ranch.app.src | 9 + rabbitmq-server/deps/ranch/src/ranch.erl | 196 + .../deps/ranch/src/ranch_acceptor.erl | 58 + .../deps/ranch/src/ranch_acceptors_sup.erl | 56 + .../ranch/src/ranch_app.erl} | 27 +- .../deps/ranch/src/ranch_conns_sup.erl | 284 + .../deps/ranch/src/ranch_listener_sup.erl | 42 + .../deps/ranch/src/ranch_protocol.erl | 24 + .../deps/ranch/src/ranch_server.erl | 154 + rabbitmq-server/deps/ranch/src/ranch_ssl.erl | 226 + .../ranch/src/ranch_sup.erl} | 30 +- rabbitmq-server/deps/ranch/src/ranch_tcp.erl | 204 + .../deps/ranch/src/ranch_transport.erl | 141 + .../deps/ranch/test/acceptor_SUITE.erl | 533 ++ .../deps/ranch/test/active_echo_protocol.erl | 26 + .../deps/ranch/test/check_tcp_options.erl | 15 + rabbitmq-server/deps/ranch/test/cover.spec | 1 + .../deps/ranch/test/crash_protocol.erl | 7 + .../deps/ranch/test/echo_protocol.erl | 22 + .../ranch/test/notify_and_wait_protocol.erl | 13 + .../deps/ranch/test/ranch_ct_hook.erl | 23 + .../test/remove_conn_and_wait_protocol.erl | 19 + .../deps/ranch/test/sendfile_SUITE.erl | 320 + .../deps/ranch/test/shutdown_SUITE.erl | 148 + .../deps/ranch/test/supervisor_separate.erl | 16 + .../deps/ranch/test/trap_exit_protocol.erl | 23 + .../sockjs-erlang-git => deps/sockjs}/COPYING | 4 +- .../sockjs}/Changelog | 0 .../sockjs/LICENSE-APL2-Rebar} | 0 rabbitmq-server/deps/sockjs/LICENSE-EPL-OTP | 286 + 
.../deps/sockjs/LICENSE-MIT-Mochiweb | 22 + .../sockjs/LICENSE-MIT-SockJS} | 4 +- rabbitmq-server/deps/sockjs/Makefile | 24 + .../Makefile => deps/sockjs/Makefile.orig.mk} | 0 .../sockjs}/README.md | 90 +- .../deps/sockjs/examples/cowboy_echo.erl | 56 + .../examples/cowboy_echo_authen_callback.erl | 86 + .../sockjs}/examples/cowboy_test_server.erl | 32 +- .../sockjs}/examples/echo.html | 6 +- .../sockjs/examples/echo_authen_callback.html | 72 + .../examples/multiplex/cowboy_multiplex.erl | 47 +- .../cowboy_multiplex_authen_callback.erl | 107 + .../sockjs}/examples/multiplex/index.html | 6 +- .../multiplex/index_authen_callback.html | 109 + rabbitmq-server/deps/sockjs/rebar | Bin 0 -> 175491 bytes .../sockjs}/rebar.config | 7 +- .../sockjs}/src/mochijson2_fork.erl | 0 .../sockjs}/src/mochinum_fork.erl | 0 .../deps/sockjs/src/sockjs.app.src | 7 + rabbitmq-server/deps/sockjs/src/sockjs.erl | 38 + .../sockjs}/src/sockjs_action.erl | 36 +- .../sockjs}/src/sockjs_app.erl | 4 +- .../sockjs}/src/sockjs_cowboy_handler.erl | 16 +- .../sockjs}/src/sockjs_filters.erl | 22 +- .../sockjs}/src/sockjs_handler.erl | 43 +- .../deps/sockjs/src/sockjs_http.erl | 144 + .../deps/sockjs/src/sockjs_internal.hrl | 33 + .../sockjs}/src/sockjs_json.erl | 4 +- .../deps/sockjs/src/sockjs_multiplex.erl | 143 + .../sockjs/src/sockjs_multiplex_channel.erl | 25 + .../sockjs}/src/sockjs_service.erl | 0 .../sockjs}/src/sockjs_session.erl | 64 +- .../sockjs}/src/sockjs_session_sup.erl | 2 +- .../sockjs}/src/sockjs_util.erl | 6 +- .../sockjs}/src/sockjs_ws_handler.erl | 6 +- .../webmachine}/Emakefile | 0 .../webmachine}/LICENSE | 0 rabbitmq-server/deps/webmachine/Makefile | 25 + .../webmachine/Makefile.orig.mk} | 0 .../webmachine}/README.org | 0 .../webmachine-git => deps/webmachine}/THANKS | 0 .../webmachine}/demo/Makefile | 0 .../webmachine}/demo/README | 0 .../webmachine}/demo/priv/dispatch.conf | 0 .../webmachine}/demo/rebar.config | 0 .../demo/src/webmachine_demo.app.src | 0 
.../webmachine}/demo/src/webmachine_demo.erl | 0 .../demo/src/webmachine_demo_app.erl | 0 .../demo/src/webmachine_demo_fs_resource.erl | 0 .../demo/src/webmachine_demo_resource.erl | 0 .../demo/src/webmachine_demo_sup.erl | 0 .../webmachine}/demo/start.sh | 0 .../docs/http-headers-status-v3.png | Bin .../webmachine}/include/webmachine.hrl | 0 .../webmachine}/include/webmachine_logger.hrl | 0 .../webmachine}/include/wm_reqdata.hrl | 0 .../webmachine}/include/wm_reqstate.hrl | 0 .../webmachine}/include/wm_resource.hrl | 0 .../webmachine}/priv/templates/Makefile | 0 .../webmachine}/priv/templates/README | 0 .../priv/templates/priv/dispatch.conf | 0 .../webmachine}/priv/templates/rebar.config | 0 .../priv/templates/src/wmskel.app.src | 0 .../webmachine}/priv/templates/src/wmskel.erl | 0 .../priv/templates/src/wmskel_app.erl | 0 .../priv/templates/src/wmskel_resource.erl | 0 .../priv/templates/src/wmskel_sup.erl | 0 .../webmachine}/priv/templates/start.sh | 0 .../priv/templates/wmskel.template | 0 .../priv/trace/http-headers-status-v3.png | Bin .../webmachine}/priv/trace/wmtrace.css | 0 .../webmachine}/priv/trace/wmtrace.js | 0 .../webmachine}/priv/www/index.html | 0 .../webmachine-git => deps/webmachine}/rebar | Bin rabbitmq-server/deps/webmachine/rebar.config | 9 + .../webmachine}/rebar.config.script | 0 .../webmachine}/scripts/new_webmachine.sh | 0 .../deps/webmachine/src/webmachine.app.src | 8 + .../webmachine}/src/webmachine.erl | 0 .../webmachine}/src/webmachine_app.erl | 0 .../src/webmachine_decision_core.erl | 0 .../webmachine}/src/webmachine_deps.erl | 0 .../webmachine}/src/webmachine_dispatcher.erl | 0 .../webmachine}/src/webmachine_error.erl | 0 .../src/webmachine_error_handler.erl | 0 .../webmachine}/src/webmachine_log.erl | 0 .../src/webmachine_log_handler.erl | 0 .../src/webmachine_logger_watcher.erl | 0 .../src/webmachine_logger_watcher_sup.erl | 0 .../webmachine}/src/webmachine_mochiweb.erl | 0 .../webmachine}/src/webmachine_multipart.erl | 0 
.../src/webmachine_perf_log_handler.erl | 0 .../webmachine}/src/webmachine_request.erl | 0 .../webmachine}/src/webmachine_resource.erl | 0 .../webmachine}/src/webmachine_router.erl | 0 .../webmachine}/src/webmachine_sup.erl | 0 .../webmachine}/src/webmachine_util.erl | 0 .../webmachine}/src/wmtrace_resource.erl | 0 .../webmachine}/src/wrq.erl | 0 .../webmachine}/start-dev.sh | 0 .../webmachine}/start.sh | 0 .../webmachine}/test/etag_test.erl | 0 .../webmachine}/www/blogs.html | 0 .../webmachine}/www/contact.html | 0 .../webmachine}/www/css/style-1c.css | 0 .../webmachine}/www/css/style.css | 0 .../webmachine}/www/debugging.html | 0 .../webmachine}/www/diagram.html | 0 .../webmachine}/www/dispatcher.html | 0 .../webmachine}/www/docs.html | 0 .../webmachine}/www/example_resources.html | 0 .../webmachine}/www/favicon.ico | Bin .../webmachine}/www/images/WM200-crop.png | Bin .../www/images/basho-landscape.gif | Bin .../www/images/basic-trace-decision-tab.png | Bin .../www/images/basic-trace-labeled.png | Bin .../www/images/basic-trace-request-tab.png | Bin .../www/images/basic-trace-response-tab.png | Bin .../webmachine}/www/images/bg.gif | Bin .../webmachine}/www/images/blankbox.gif | Bin .../webmachine}/www/images/chash.gif | Bin .../webmachine}/www/images/easy-ops.gif | Bin .../webmachine}/www/images/gossip4.gif | Bin .../webmachine}/www/images/halfblankbox.gif | Bin .../www/images/http-headers-status-v3.png | Bin .../webmachine}/www/images/more.gif | Bin .../webmachine}/www/images/site.gif | Bin .../webmachine}/www/images/splash250.gif | Bin .../webmachine}/www/images/vclock.gif | Bin .../webmachine}/www/index.html | 0 .../webmachine}/www/intros.html | 0 .../webmachine}/www/mechanics.html | 0 .../webmachine}/www/quickstart.html | 0 .../webmachine}/www/reftrans.html | 0 .../webmachine}/www/reqdata.html | 0 .../webmachine}/www/resources.html | 0 .../webmachine}/www/streambody.html | 0 rabbitmq-server/docs/README-for-packages | 20 + 
rabbitmq-server/docs/rabbitmq.config.example | 71 +- rabbitmq-server/docs/rabbitmqctl.1.xml | 141 +- rabbitmq-server/erlang.mk | 6617 ++++++++++++++++ rabbitmq-server/generate_app | 16 - rabbitmq-server/generate_deps | 57 - rabbitmq-server/git-revisions.txt | 30 + rabbitmq-server/include/rabbit_cli.hrl | 11 + rabbitmq-server/plugins-src/Makefile | 240 - rabbitmq-server/plugins-src/README | 1 - rabbitmq-server/plugins-src/all-packages.mk | 13 - rabbitmq-server/plugins-src/common.mk | 143 - .../plugins-src/cowboy-wrapper/.srcdist_done | 0 .../0001-R12-fake-iodata-type.patch | 40 - ...-drop-all-references-to-boolean-type.patch | 165 - ...rop-all-references-to-reference-type.patch | 55 - ...4-R12-drop-references-to-iodata-type.patch | 50 - ...-drop-references-to-Default-any-type.patch | 52 - ...er_to_list-and-lists-max-instead-of-.patch | 62 - ...R12-type-definitions-must-be-ordered.patch | 37 - .../0008-sec-websocket-protocol.patch | 16 - .../plugins-src/cowboy-wrapper/Makefile | 1 - .../plugins-src/cowboy-wrapper/README.md | 1 - .../cowboy-wrapper/cowboy-git/.done | 0 .../cowboy-wrapper/cowboy-git/.travis.yml | 7 - .../cowboy-wrapper/cowboy-git/AUTHORS | 18 - .../cowboy-wrapper/cowboy-git/CHANGELOG.md | 213 - .../cowboy-wrapper/cowboy-git/Makefile | 36 - .../cowboy-wrapper/cowboy-git/README.md | 290 - .../cowboy-git/doc/overview.edoc | 4 - .../cowboy-git/include/http.hrl | 55 - .../cowboy-wrapper/cowboy-git/rebar.config | 12 - .../cowboy-wrapper/cowboy-git/src/cowboy.erl | 85 - .../cowboy-git/src/cowboy_acceptor.erl | 59 - .../cowboy-git/src/cowboy_acceptors_sup.erl | 43 - .../cowboy-git/src/cowboy_cookies.erl | 392 - .../cowboy-git/src/cowboy_dispatcher.erl | 309 - .../cowboy-git/src/cowboy_http_handler.erl | 48 - .../cowboy-git/src/cowboy_http_protocol.erl | 472 -- .../cowboy-git/src/cowboy_http_req.erl | 820 -- .../cowboy-git/src/cowboy_http_static.erl | 456 -- .../cowboy-git/src/cowboy_http_websocket.erl | 530 -- .../src/cowboy_http_websocket_handler.erl | 60 - 
.../cowboy-git/src/cowboy_listener.erl | 174 - .../cowboy-git/src/cowboy_listener_sup.erl | 45 - .../cowboy-git/src/cowboy_multipart.erl | 249 - .../cowboy-git/src/cowboy_protocol.erl | 61 - .../cowboy-git/src/cowboy_ssl_transport.erl | 164 - .../cowboy-git/src/cowboy_tcp_transport.erl | 106 - .../cowboy-git/test/chunked_handler.erl | 17 - .../cowboy-git/test/dispatcher_prop.erl | 68 - .../cowboy-git/test/http_SUITE.erl | 613 -- .../cowboy-git/test/http_SUITE_data/cert.pem | 14 - .../cowboy-git/test/http_SUITE_data/key.pem | 18 - .../test/http_handler_init_shutdown.erl | 17 - .../test/http_handler_long_polling.erl | 22 - .../test/http_handler_multipart.erl | 29 - .../cowboy-git/test/http_handler_set_resp.erl | 33 - .../test/http_handler_stream_body.erl | 24 - .../cowboy-git/test/ws_SUITE.erl | 318 - .../plugins-src/cowboy-wrapper/hash.mk | 1 - .../plugins-src/cowboy-wrapper/package.mk | 24 - rabbitmq-server/plugins-src/do-package.mk | 578 -- .../plugins-src/eldap-wrapper/.srcdist_done | 0 .../plugins-src/eldap-wrapper/Makefile | 1 - .../eldap-wrapper/eldap-appify.patch | 14 - .../plugins-src/eldap-wrapper/eldap-git/.done | 0 .../eldap-wrapper/eldap-git/Makefile | 7 - .../eldap-wrapper/eldap-git/README | 33 - .../eldap-git/doc/README.example | 44 - .../eldap-wrapper/eldap-git/doc/short-desc | 1 - .../eldap-wrapper/eldap-git/ebin/eldap.app | 10 - .../eldap-wrapper/eldap-git/include/eldap.hrl | 32 - .../eldap-wrapper/eldap-git/src/ELDAPv3.asn | 291 - .../eldap-wrapper/eldap-git/src/Makefile | 26 - .../eldap-wrapper/eldap-git/src/eldap.erl | 1078 --- .../eldap-wrapper/eldap-git/test/README.test | 96 - .../eldap-wrapper/eldap-git/test/bill.ldif | 13 - .../eldap-git/test/bluetail.ldif | 18 - .../eldap-wrapper/eldap-git/test/crl.ldif | 5 - .../eldap-git/test/eldap_test.erl | 537 -- .../eldap-wrapper/eldap-git/test/ldap.rc | 103 - .../eldap-wrapper/eldap-git/test/people.ldif | 11 - .../eldap-git/test/post_danmark.ldif | 5 - .../eldap-wrapper/eldap-git/test/server1.crl | 
Bin 47075 -> 0 bytes .../eldap-wrapper/eldap-git/test/slapd.conf | 41 - .../eldap-wrapper/eldap-git/test/tobbe.ldif | 6 - .../eldap-wrapper/eldap-no-ssl-seed.patch | 17 - .../plugins-src/eldap-wrapper/hash.mk | 1 - .../plugins-src/eldap-wrapper/license_info | 3 - .../plugins-src/eldap-wrapper/package.mk | 30 - .../eldap-wrapper/remove-eldap-fsm.patch | 952 --- .../eldap-wrapper/remove-ietf-doc.patch | 3036 -------- rabbitmq-server/plugins-src/generate_app | 16 - rabbitmq-server/plugins-src/generate_deps | 61 - .../plugins-src/licensing/LICENSE-MIT-Mochi | 9 - .../licensing/license_info_eldap-wrapper | 3 - .../licensing/license_info_mochiweb-wrapper | 4 - .../licensing/license_info_webmachine-wrapper | 3 - .../mochiweb-wrapper/.srcdist_done | 0 .../mochiweb-wrapper/10-build-on-R12B-5.patch | 303 - .../mochiweb-wrapper/20-MAX_RECV_BODY.patch | 13 - .../30-remove-crypto-ssl-dependencies.patch | 104 - ...e-compiler-syntax_tools-dependencies.patch | 124 - .../mochiweb-wrapper/50-remove-json.patch | 1255 ---- .../mochiweb-wrapper/LICENSE-MIT-Mochi | 9 - .../plugins-src/mochiweb-wrapper/Makefile | 1 - .../plugins-src/mochiweb-wrapper/hash.mk | 1 - .../plugins-src/mochiweb-wrapper/license_info | 4 - .../mochiweb-wrapper/mochiweb-git/.done | 0 .../mochiweb-wrapper/mochiweb-git/.travis.yml | 7 - .../mochiweb-wrapper/mochiweb-git/CHANGES.md | 91 - .../mochiweb-wrapper/mochiweb-git/Makefile | 29 - .../mochiweb-wrapper/mochiweb-git/rebar | Bin 95259 -> 0 bytes .../mochiweb-git/scripts/new_mochiweb.erl | 23 - .../mochiweb-git/src/mochifmt_std.erl | 33 - .../mochiweb-git/src/mochiweb.app.src | 8 - .../mochiweb-git/src/mochiweb.erl | 76 - .../mochiweb-git/src/mochiweb_acceptor.erl | 50 - .../mochiweb-git/src/mochiweb_echo.erl | 41 - .../mochiweb-git/src/mochiweb_socket.erl | 84 - .../templates/mochiwebapp_skel/start-dev.sh | 6 - .../mochiweb-git/test/mochiweb_tests.erl | 199 - .../plugins-src/mochiweb-wrapper/package.mk | 40 - .../rabbitmq-amqp1.0/.srcdist_done | 0 
.../plugins-src/rabbitmq-amqp1.0/Makefile | 1 - .../plugins-src/rabbitmq-amqp1.0/package.mk | 26 - .../test/swiftmq/run-tests.sh | 2 - .../rabbitmq-auth-backend-ldap/.srcdist_done | 0 .../rabbitmq-auth-backend-ldap/.travis.yml | 37 - .../rabbitmq-auth-backend-ldap/Makefile | 1 - .../rabbitmq-auth-backend-ldap/README | 20 - .../README-authorisation | 1 - .../example/groups.ldif | 8 - .../rabbitmq-auth-backend-ldap/package.mk | 9 - .../rabbitmq-auth-mechanism-ssl/.srcdist_done | 0 .../rabbitmq-auth-mechanism-ssl/Makefile | 1 - .../rabbitmq-auth-mechanism-ssl/package.mk | 2 - .../.srcdist_done | 0 .../.travis.yml | 32 - .../Makefile | 1 - .../package.mk | 3 - .../rabbitmq-erlang-client/.srcdist_done | 0 .../rabbitmq-erlang-client/Makefile | 125 - .../rabbitmq-erlang-client/Makefile.in | 26 - .../rabbitmq-erlang-client/common.mk | 201 - .../rabbit_common.app.in | 46 - .../rabbitmq-erlang-client/test.mk | 130 - .../.srcdist_done | 0 .../rabbitmq-federation-management/Makefile | 1 - .../rabbitmq-federation-management/README | 8 - .../rabbitmq-federation-management/package.mk | 7 - .../rabbitmq-federation/.srcdist_done | 0 .../plugins-src/rabbitmq-federation/Makefile | 2 - .../etc/setup-rabbit-test.sh | 2 - .../rabbitmq-federation/package.mk | 15 - .../rabbitmq-management-agent/.srcdist_done | 0 .../rabbitmq-management-agent/Makefile | 1 - .../rabbitmq-management-agent/package.mk | 1 - .../.srcdist_done | 0 .../rabbitmq-management-visualiser/Makefile | 1 - .../rabbitmq-management-visualiser/package.mk | 7 - .../rabbitmq-management/.srcdist_done | 0 .../rabbitmq-management/.travis.yml | 33 - .../plugins-src/rabbitmq-management/Makefile | 28 - .../plugins-src/rabbitmq-management/README | 12 - .../rabbitmq-management/package.mk | 25 - .../priv/www/js/sammy-0.6.0.min.js | 5 - .../priv/www/js/tmpl/channels.ejs | 5 - .../plugins-src/rabbitmq-mqtt/.srcdist_done | 0 .../plugins-src/rabbitmq-mqtt/Makefile | 1 - .../plugins-src/rabbitmq-mqtt/README.md | 9 - 
.../plugins-src/rabbitmq-mqtt/package.mk | 21 - .../rabbitmq-mqtt/test/lib/junit.jar | Bin 121070 -> 0 bytes .../rabbitmq-mqtt/test/setup-rabbit-test.sh | 2 - .../rabbitmq/mqtt/test/setup-rabbit-test.sh | 2 - .../rabbitmq-shovel-management/.srcdist_done | 0 .../rabbitmq-shovel-management/Makefile | 1 - .../rabbitmq-shovel-management/package.mk | 9 - .../plugins-src/rabbitmq-shovel/.srcdist_done | 0 .../plugins-src/rabbitmq-shovel/Makefile | 1 - .../plugins-src/rabbitmq-shovel/README | 4 - .../plugins-src/rabbitmq-shovel/generate_deps | 54 - .../plugins-src/rabbitmq-shovel/package.mk | 3 - .../plugins-src/rabbitmq-stomp/.srcdist_done | 0 .../plugins-src/rabbitmq-stomp/.travis.yml | 32 - .../plugins-src/rabbitmq-stomp/Makefile | 1 - .../rabbitmq-stomp/deps/pika/Makefile | 27 - .../rabbitmq-stomp/deps/stomppy/Makefile | 27 - .../plugins-src/rabbitmq-stomp/package.mk | 26 - .../src/rabbit_stomp_reader.erl | 243 - .../plugins-src/rabbitmq-test/.srcdist_done | 0 .../plugins-src/rabbitmq-test/Makefile | 210 - .../plugins-src/rabbitmq-test/README | 19 - .../plugins-src/rabbitmq-test/certs/Makefile | 58 - .../rabbitmq-test/certs/openssl.cnf | 54 - .../plugins-src/rabbitmq-test/package.mk | 11 - .../plugins-src/rabbitmq-test/qpid_config.py | 26 - .../plugins-src/rabbitmq-test/qpid_patch | 142 - .../rabbitmq-test/rabbit_failing.txt | 9 - .../rabbitmq-test/src/inet_proxy_dist.erl | 199 - .../rabbitmq-test/src/inet_tcp_proxy.erl | 106 - .../src/inet_tcp_proxy_manager.erl | 107 - .../src/rabbit_ha_test_consumer.erl | 114 - .../src/rabbit_ha_test_producer.erl | 119 - .../rabbitmq-test/src/rabbit_test_configs.erl | 279 - .../rabbitmq-test/src/rabbit_test_runner.erl | 230 - .../rabbitmq-test/src/rabbit_test_util.erl | 147 - .../rabbitmq-test/src/rabbitmq_test.app.src | 11 - .../rabbitmq-test/test/src/cluster_rename.erl | 194 - .../test/src/clustering_management.erl | 608 -- .../test/src/crashing_queues.erl | 213 - .../rabbitmq-test/test/src/dynamic_ha.erl | 254 - 
.../rabbitmq-test/test/src/eager_sync.erl | 205 - .../rabbitmq-test/test/src/many_node_ha.erl | 64 - .../rabbitmq-test/test/src/partitions.erl | 370 - .../test/src/rabbit_priority_queue_test.erl | 335 - .../rabbitmq-test/test/src/simple_ha.erl | 143 - .../rabbitmq-test/test/src/sync_detection.erl | 189 - .../rabbitmq-tracing/.srcdist_done | 0 .../plugins-src/rabbitmq-tracing/Makefile | 1 - .../plugins-src/rabbitmq-tracing/README | 42 - .../plugins-src/rabbitmq-tracing/package.mk | 8 - .../rabbitmq-web-dispatch/.srcdist_done | 0 .../rabbitmq-web-dispatch/CONTRIBUTING.md | 51 - .../rabbitmq-web-dispatch/Makefile | 1 - .../rabbitmq-web-dispatch/package.mk | 3 - .../rabbitmq-web-stomp-examples/.srcdist_done | 0 .../CONTRIBUTING.md | 51 - .../rabbitmq-web-stomp-examples/Makefile | 1 - .../rabbitmq-web-stomp-examples/package.mk | 6 - .../priv/sockjs-0.3.js | 2379 ------ .../rabbitmq-web-stomp-examples/priv/stomp.js | 396 - .../rabbitmq-web-stomp/.srcdist_done | 0 .../rabbitmq-web-stomp/CONTRIBUTING.md | 51 - .../plugins-src/rabbitmq-web-stomp/Makefile | 1 - .../plugins-src/rabbitmq-web-stomp/package.mk | 4 - .../src/rabbit_ws_client.erl | 97 - .../src/rabbit_ws_client_sup.erl | 72 - rabbitmq-server/plugins-src/release.mk | 273 - .../sockjs-erlang-wrapper/.srcdist_done | 0 .../0000-remove-spec-patch.diff | 816 -- .../sockjs-erlang-wrapper/0001-a2b-b2a.diff | 22 - .../0002-parameterised-modules-r16a.diff | 477 -- .../0003-websocket-subprotocol | 93 - .../sockjs-erlang-wrapper/CONTRIBUTING.md | 51 - .../sockjs-erlang-wrapper/Makefile | 1 - .../generate-0000-remove-spec-patch.sh | 10 - .../plugins-src/sockjs-erlang-wrapper/hash.mk | 1 - .../sockjs-erlang-wrapper/package.mk | 27 - .../sockjs-erlang-git/.done | 0 .../examples/cowboy_echo.erl | 50 - .../examples/multiplex/multiplex.js | 80 - .../sockjs-erlang-git/rebar | Bin 114094 -> 0 bytes .../sockjs-erlang-git/src/pmod_pt.erl | 461 -- .../sockjs-erlang-git/src/sockjs.app.src | 12 - .../sockjs-erlang-git/src/sockjs.erl | 24 
- .../sockjs-erlang-git/src/sockjs_http.erl | 137 - .../sockjs-erlang-git/src/sockjs_internal.hrl | 33 - .../src/sockjs_multiplex.erl | 79 - .../src/sockjs_multiplex_channel.erl | 18 - rabbitmq-server/plugins-src/umbrella.mk | 55 - .../webmachine-wrapper/.srcdist_done | 0 .../10-remove-crypto-dependency.patch | 78 - .../webmachine-wrapper/CONTRIBUTING.md | 51 - .../plugins-src/webmachine-wrapper/Makefile | 1 - .../plugins-src/webmachine-wrapper/hash.mk | 1 - .../webmachine-wrapper/license_info | 3 - .../plugins-src/webmachine-wrapper/package.mk | 19 - .../webmachine-wrapper/webmachine-git/.done | 0 .../webmachine-git/.travis.yml | 9 - .../webmachine-git/rebar.config | 9 - .../webmachine-git/src/webmachine.app.src | 13 - rabbitmq-server/quickcheck | 41 + rabbitmq-server/rabbitmq-components.mk | 331 + rabbitmq-server/scripts/rabbitmq-defaults | 6 + rabbitmq-server/scripts/rabbitmq-defaults.bat | 88 +- rabbitmq-server/scripts/rabbitmq-echopid.bat | 110 +- rabbitmq-server/scripts/rabbitmq-env | 176 +- rabbitmq-server/scripts/rabbitmq-env.bat | 668 +- rabbitmq-server/scripts/rabbitmq-plugins | 7 +- rabbitmq-server/scripts/rabbitmq-plugins.bat | 9 +- .../scripts}/rabbitmq-script-wrapper | 22 +- rabbitmq-server/scripts/rabbitmq-server | 20 +- .../scripts/rabbitmq-server-ha.ocf | 2250 ++++++ rabbitmq-server/scripts/rabbitmq-server.bat | 34 +- .../scripts/rabbitmq-server.ocf | 4 +- rabbitmq-server/scripts/rabbitmq-service.bat | 9 +- rabbitmq-server/scripts/rabbitmqctl | 5 + rabbitmq-server/scripts/rabbitmqctl.bat | 13 +- .../scripts/set_rabbitmq_policy.sh | 5 + rabbitmq-server/src/background_gc.erl | 5 +- rabbitmq-server/src/delegate.erl | 10 +- rabbitmq-server/src/delegate_sup.erl | 2 +- rabbitmq-server/src/dtree.erl | 4 +- rabbitmq-server/src/file_handle_cache.erl | 62 +- .../src/file_handle_cache_stats.erl | 11 +- rabbitmq-server/src/gatherer.erl | 6 +- rabbitmq-server/src/gm.erl | 15 +- rabbitmq-server/src/mnesia_sync.erl | 4 +- rabbitmq-server/src/pg2_fixed.erl | 55 +- 
rabbitmq-server/src/pg_local.erl | 32 +- .../rabbit_app.in => src/rabbit.app.src} | 21 +- rabbitmq-server/src/rabbit.erl | 201 +- rabbitmq-server/src/rabbit_access_control.erl | 12 +- rabbitmq-server/src/rabbit_alarm.erl | 107 +- .../src/rabbit_amqqueue_process.erl | 33 +- rabbitmq-server/src/rabbit_amqqueue_sup.erl | 2 +- .../src/rabbit_amqqueue_sup_sup.erl | 2 +- .../src/rabbit_auth_backend_dummy.erl | 2 +- .../src/rabbit_auth_backend_internal.erl | 137 +- .../src/rabbit_auth_mechanism_amqplain.erl | 2 +- .../src/rabbit_auth_mechanism_cr_demo.erl | 2 +- .../src/rabbit_auth_mechanism_plain.erl | 2 +- rabbitmq-server/src/rabbit_autoheal.erl | 2 +- rabbitmq-server/src/rabbit_binding.erl | 10 +- rabbitmq-server/src/rabbit_boot_steps.erl | 97 + .../src/rabbit_channel_interceptor.erl | 91 - rabbitmq-server/src/rabbit_channel_sup.erl | 22 +- .../src/rabbit_channel_sup_sup.erl | 7 +- rabbitmq-server/src/rabbit_cli.erl | 81 +- rabbitmq-server/src/rabbit_client_sup.erl | 2 +- .../src/rabbit_connection_helper_sup.erl | 11 +- rabbitmq-server/src/rabbit_connection_sup.erl | 20 +- rabbitmq-server/src/rabbit_control_main.erl | 281 +- rabbitmq-server/src/rabbit_dead_letter.erl | 4 +- rabbitmq-server/src/rabbit_diagnostics.erl | 37 +- rabbitmq-server/src/rabbit_direct.erl | 14 +- rabbitmq-server/src/rabbit_disk_monitor.erl | 70 +- rabbitmq-server/src/rabbit_epmd_monitor.erl | 2 +- rabbitmq-server/src/rabbit_error_logger.erl | 8 +- .../src/rabbit_error_logger_file_h.erl | 5 +- rabbitmq-server/src/rabbit_exchange.erl | 97 +- .../src/rabbit_exchange_parameters.erl | 49 + .../src/rabbit_exchange_type_direct.erl | 2 +- .../src/rabbit_exchange_type_fanout.erl | 2 +- .../src/rabbit_exchange_type_headers.erl | 2 +- .../src/rabbit_exchange_type_invalid.erl | 2 +- .../src/rabbit_exchange_type_topic.erl | 2 +- rabbitmq-server/src/rabbit_framing.erl | 2 +- rabbitmq-server/src/rabbit_guid.erl | 2 +- rabbitmq-server/src/rabbit_hipe.erl | 98 + rabbitmq-server/src/rabbit_limiter.erl | 2 +- 
rabbitmq-server/src/rabbit_log.erl | 2 +- rabbitmq-server/src/rabbit_memory_monitor.erl | 10 +- .../src/rabbit_mirror_queue_coordinator.erl | 17 +- .../src/rabbit_mirror_queue_master.erl | 70 +- .../src/rabbit_mirror_queue_misc.erl | 81 +- .../src/rabbit_mirror_queue_mode_exactly.erl | 5 +- .../src/rabbit_mirror_queue_slave.erl | 55 +- .../src/rabbit_mirror_queue_sync.erl | 221 +- rabbitmq-server/src/rabbit_mnesia.erl | 39 +- rabbitmq-server/src/rabbit_mnesia_rename.erl | 2 +- rabbitmq-server/src/rabbit_msg_file.erl | 2 +- rabbitmq-server/src/rabbit_msg_store.erl | 105 +- .../src/rabbit_msg_store_ets_index.erl | 2 +- rabbitmq-server/src/rabbit_msg_store_gc.erl | 2 +- rabbitmq-server/src/rabbit_node_monitor.erl | 28 +- .../src/rabbit_parameter_validation.erl | 2 +- rabbitmq-server/src/rabbit_password.erl | 64 + .../src/rabbit_password_hashing_md5.erl | 28 + .../src/rabbit_password_hashing_sha256.erl | 24 + .../src/rabbit_password_hashing_sha512.erl | 24 + rabbitmq-server/src/rabbit_plugins.erl | 24 +- rabbitmq-server/src/rabbit_plugins_main.erl | 8 +- rabbitmq-server/src/rabbit_policies.erl | 18 +- rabbitmq-server/src/rabbit_policy.erl | 24 +- rabbitmq-server/src/rabbit_prelaunch.erl | 20 +- rabbitmq-server/src/rabbit_priority_queue.erl | 114 +- .../src/rabbit_queue_consumers.erl | 22 +- rabbitmq-server/src/rabbit_queue_index.erl | 56 +- .../rabbit_queue_location_client_local.erl | 40 + .../src/rabbit_queue_location_min_masters.erl | 79 + .../src/rabbit_queue_location_random.erl | 44 + .../src/rabbit_queue_location_validator.erl | 69 + .../src/rabbit_queue_master_location_misc.erl | 95 + rabbitmq-server/src/rabbit_recovery_terms.erl | 2 +- rabbitmq-server/src/rabbit_registry.erl | 5 +- .../src/rabbit_resource_monitor_misc.erl | 51 + .../src/rabbit_restartable_sup.erl | 2 +- rabbitmq-server/src/rabbit_router.erl | 2 +- .../src/rabbit_runtime_parameters.erl | 54 +- .../src/rabbit_sasl_report_file_h.erl | 4 +- rabbitmq-server/src/rabbit_ssl.erl | 2 +- 
rabbitmq-server/src/rabbit_sup.erl | 2 +- rabbitmq-server/src/rabbit_table.erl | 2 +- rabbitmq-server/src/rabbit_trace.erl | 2 +- rabbitmq-server/src/rabbit_types.erl | 5 +- rabbitmq-server/src/rabbit_upgrade.erl | 11 +- .../src/rabbit_upgrade_functions.erl | 25 +- rabbitmq-server/src/rabbit_variable_queue.erl | 506 +- rabbitmq-server/src/rabbit_version.erl | 33 +- rabbitmq-server/src/rabbit_vhost.erl | 11 +- rabbitmq-server/src/rabbit_vm.erl | 21 +- rabbitmq-server/src/supervised_lifecycle.erl | 2 +- rabbitmq-server/src/tcp_acceptor.erl | 105 - rabbitmq-server/src/tcp_acceptor_sup.erl | 43 - rabbitmq-server/src/tcp_listener.erl | 85 +- rabbitmq-server/src/tcp_listener_sup.erl | 64 +- rabbitmq-server/src/truncate.erl | 2 +- rabbitmq-server/src/vm_memory_monitor.erl | 46 +- rabbitmq-server/src/worker_pool.erl | 41 +- rabbitmq-server/src/worker_pool_sup.erl | 25 +- rabbitmq-server/src/worker_pool_worker.erl | 24 +- .../test/temp/head_message_timestamp_tests.py | 131 + rabbitmq-server/test/temp/rabbitmqadmin.py | 944 +++ rabbitmq-server/version.mk | 1 - tests/runtests.sh | 152 + 1519 files changed, 209808 insertions(+), 33778 deletions(-) delete mode 100644 debian/patches/detect-stuck-queue-on-declare.diff delete mode 100644 debian/patches/fix-management-startup-after-split.diff delete mode 100644 debian/patches/fix-pmon-demonitor-function.diff delete mode 100644 debian/patches/series delete mode 100644 debian/rabbitmq-env.conf delete mode 100755 debian/rabbitmq-server-wait delete mode 100644 debian/rabbitmq-server.install delete mode 100644 debian/rabbitmq-server.links create mode 100644 debian/rabbitmq-server.manpages rename rabbitmq-server/{codegen => }/CONTRIBUTING.md (100%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => }/LICENSE-APL2-Rebar (100%) create mode 100644 rabbitmq-server/LICENSE-EPL-OTP rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => }/LICENSE-MIT-Mochiweb (100%) rename 
rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => }/LICENSE-MIT-SockJS (100%) create mode 100644 rabbitmq-server/LICENSE-MPL2 create mode 100644 rabbitmq-server/README.md create mode 100644 rabbitmq-server/build.config delete mode 100755 rabbitmq-server/calculate-relative create mode 100755 rabbitmq-server/check_xref delete mode 100644 rabbitmq-server/codegen/license_info rename rabbitmq-server/{plugins-src/cowboy-wrapper => deps/amqp_client}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/amqp_client/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/README.in (100%) create mode 100644 rabbitmq-server/deps/amqp_client/build.config create mode 100644 rabbitmq-server/deps/amqp_client/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/include/amqp_client.hrl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/include/amqp_client_internal.hrl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/include/amqp_gen_consumer_spec.hrl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/include/rabbit_routing_prefixes.hrl (100%) create mode 100644 rabbitmq-server/deps/amqp_client/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_auth_mechanisms.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_channel.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_channel_sup.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_channel_sup_sup.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_channels_manager.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client/ebin/amqp_client.app.in => 
deps/amqp_client/src/amqp_client.app.src} (72%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_client.erl (92%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_connection.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_connection_sup.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_connection_type_sup.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_direct_connection.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_direct_consumer.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_gen_connection.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_gen_consumer.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_main_reader.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_network_connection.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_rpc_client.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_rpc_server.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_selective_consumer.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_sup.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/amqp_uri.erl (92%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/overview.edoc.in (100%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/rabbit_routing_util.erl (99%) rename 
rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/src/uri_parser.erl (100%) create mode 100644 rabbitmq-server/deps/amqp_client/test.mk rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/test/Makefile (100%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/test/amqp_client_SUITE.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/test/amqp_dbg.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/test/negative_test_util.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/amqp_client}/test/test_util.erl (95%) create mode 100644 rabbitmq-server/deps/cowboy/AUTHORS create mode 100644 rabbitmq-server/deps/cowboy/CHANGELOG.md create mode 100644 rabbitmq-server/deps/cowboy/CONTRIBUTING.md rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git => deps/cowboy}/LICENSE (92%) create mode 100644 rabbitmq-server/deps/cowboy/Makefile create mode 100644 rabbitmq-server/deps/cowboy/README.md create mode 100644 rabbitmq-server/deps/cowboy/ROADMAP.md create mode 100755 rabbitmq-server/deps/cowboy/all.sh create mode 100644 rabbitmq-server/deps/cowboy/circle.yml create mode 100644 rabbitmq-server/deps/cowboy/erlang.mk create mode 100644 rabbitmq-server/deps/cowboy/rebar.config create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy.app.src create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.app.src => deps/cowboy/src/cowboy_app.erl} (71%) rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git => deps/cowboy}/src/cowboy_bstr.erl (54%) rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git => deps/cowboy}/src/cowboy_clock.erl (74%) create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_handler.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git => deps/cowboy}/src/cowboy_http.erl (69%) 
create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_http_handler.erl create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_loop_handler.erl create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_middleware.erl create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_protocol.erl create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_req.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_rest.erl => deps/cowboy/src/cowboy_rest.erl} (54%) create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_router.erl create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_spdy.erl create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_static.erl create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_sub_protocol.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git => deps/cowboy}/src/cowboy_sup.erl (75%) create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_websocket.erl create mode 100644 rabbitmq-server/deps/cowboy/src/cowboy_websocket_handler.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git => deps/cowboy/test}/cover.spec (100%) create mode 100644 rabbitmq-server/deps/cowboy/test/cowboy_ct_hook.erl create mode 100644 rabbitmq-server/deps/cowboy/test/cowboy_error_h.erl create mode 100644 rabbitmq-server/deps/cowboy/test/cowboy_test.erl create mode 100644 rabbitmq-server/deps/cowboy/test/eunit_SUITE.erl create mode 100644 rabbitmq-server/deps/cowboy/test/handlers/input_crash_h.erl create mode 100644 rabbitmq-server/deps/cowboy/test/handlers/long_polling_h.erl create mode 100644 rabbitmq-server/deps/cowboy/test/handlers/loop_handler_body_h.erl create mode 100644 rabbitmq-server/deps/cowboy/test/handlers/loop_handler_timeout_h.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_body_qs.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_chunked.erl create mode 100644 
rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_echo_body.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_errors.erl => deps/cowboy/test/http_SUITE_data/http_errors.erl} (56%) rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/test => deps/cowboy/test/http_SUITE_data}/http_handler.erl (76%) create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_init_shutdown.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_loop_stream_recv.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_multipart.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_multipart_stream.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_req_attr.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_set_resp.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_stream_body.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_streamed.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_empty_resource.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_expires.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_expires_binary.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/test => deps/cowboy/test/http_SUITE_data}/rest_forbidden_resource.erl (67%) create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_missing_callbacks.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_nodelete_resource.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_param_all.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_patch_resource.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_post_charset_resource.erl create mode 100644 
rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_postonly_resource.erl create mode 100644 rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_resource_etags.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/test => deps/cowboy/test/http_SUITE_data}/rest_simple_resource.erl (88%) create mode 100644 rabbitmq-server/deps/cowboy/test/loop_handler_SUITE.erl create mode 100644 rabbitmq-server/deps/cowboy/test/spdy_SUITE.erl create mode 100644 rabbitmq-server/deps/cowboy/test/ws_SUITE.erl create mode 100644 rabbitmq-server/deps/cowboy/test/ws_SUITE_data/client.json create mode 100644 rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_echo.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler.erl => deps/cowboy/test/ws_SUITE_data/ws_echo_timer.erl} (71%) rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler_init_shutdown.erl => deps/cowboy/test/ws_SUITE_data/ws_init_shutdown.erl} (55%) create mode 100644 rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_send_many.erl create mode 100644 rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_timeout_cancel.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/test/ws_timeout_hibernate_handler.erl => deps/cowboy/test/ws_SUITE_data/ws_timeout_hibernate.erl} (61%) create mode 100644 rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_upgrade_with_opts.erl create mode 100644 rabbitmq-server/deps/cowlib/AUTHORS create mode 100644 rabbitmq-server/deps/cowlib/CHANGELOG.md create mode 100644 rabbitmq-server/deps/cowlib/LICENSE create mode 100644 rabbitmq-server/deps/cowlib/Makefile create mode 100644 rabbitmq-server/deps/cowlib/README.md create mode 100755 rabbitmq-server/deps/cowlib/all.sh create mode 100644 rabbitmq-server/deps/cowlib/build.config create mode 100644 rabbitmq-server/deps/cowlib/erlang.mk create mode 100644 rabbitmq-server/deps/cowlib/include/cow_inline.hrl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_cookie.erl 
create mode 100644 rabbitmq-server/deps/cowlib/src/cow_date.erl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_http.erl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_http_hd.erl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_http_te.erl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_mimetypes.erl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_mimetypes.erl.src create mode 100644 rabbitmq-server/deps/cowlib/src/cow_multipart.erl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_qs.erl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_spdy.erl create mode 100644 rabbitmq-server/deps/cowlib/src/cow_spdy.hrl create mode 100644 rabbitmq-server/deps/cowlib/src/cowlib.app.src rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/test/proper_SUITE.erl => deps/cowlib/test/eunit_SUITE.erl} (60%) rename rabbitmq-server/{plugins-src => deps}/licensing/LICENSE-APACHE2-ExplorerCanvas (100%) rename rabbitmq-server/{LICENSE-Apache-Basho => deps/licensing/LICENSE-APL2-Rebar} (100%) rename rabbitmq-server/{plugins-src => deps}/licensing/LICENSE-APL2-Stomp-Websocket (100%) rename rabbitmq-server/{plugins-src => deps}/licensing/LICENSE-BSD-base64js (100%) rename rabbitmq-server/{plugins-src => deps}/licensing/LICENSE-BSD-glMatrix (100%) create mode 100644 rabbitmq-server/deps/licensing/LICENSE-EPL-OTP rename rabbitmq-server/{plugins-src => deps}/licensing/LICENSE-MIT-EJS10 (100%) rename rabbitmq-server/{plugins-src => deps}/licensing/LICENSE-MIT-Flot (100%) create mode 100644 rabbitmq-server/deps/licensing/LICENSE-MIT-Mochiweb rename rabbitmq-server/{plugins-src => deps}/licensing/LICENSE-MIT-Sammy060 (100%) rename rabbitmq-server/{plugins-src/eldap-wrapper/LICENSE-MIT-eldap => deps/licensing/LICENSE-MIT-SockJS} (96%) rename rabbitmq-server/{plugins-src => deps}/licensing/LICENSE-MIT-jQuery164 (100%) rename rabbitmq-server/{codegen => deps/licensing}/LICENSE-MPL-RabbitMQ (100%) create mode 100644 
rabbitmq-server/deps/licensing/LICENSE-MPL2 create mode 100644 rabbitmq-server/deps/licensing/license_info_rabbitmq_codegen rename rabbitmq-server/{plugins-src/licensing/license_info_rabbitmq-management => deps/licensing/license_info_rabbitmq_management} (100%) rename rabbitmq-server/{plugins-src/licensing/license_info_rabbitmq-management-visualiser => deps/licensing/license_info_rabbitmq_management_visualiser} (100%) create mode 100644 rabbitmq-server/deps/mochiweb/CHANGES.md rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/LICENSE (100%) create mode 100644 rabbitmq-server/deps/mochiweb/Makefile create mode 100644 rabbitmq-server/deps/mochiweb/Makefile.orig.mk rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/README (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/examples/hmac_api/README (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/examples/hmac_api/hmac_api.hrl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/examples/hmac_api/hmac_api_client.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/examples/hmac_api/hmac_api_lib.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/examples/https/https_store.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/examples/https/server_cert.pem (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/examples/https/server_key.pem (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/examples/keepalive/keepalive.erl (100%) create mode 100644 rabbitmq-server/deps/mochiweb/examples/websocket/index.html create mode 100644 rabbitmq-server/deps/mochiweb/examples/websocket/websocket.erl rename 
rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/include/internal.hrl (100%) create mode 100755 rabbitmq-server/deps/mochiweb/rebar rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/rebar.config (81%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/scripts/entities.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochifmt.erl (93%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochifmt_records.erl (50%) create mode 100644 rabbitmq-server/deps/mochiweb/src/mochifmt_std.erl create mode 100644 rabbitmq-server/deps/mochiweb/src/mochiglobal.erl rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochihex.erl (65%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochijson.erl (94%) create mode 100644 rabbitmq-server/deps/mochiweb/src/mochijson2.erl rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochilists.erl (72%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochilogfile2.erl (77%) create mode 100644 rabbitmq-server/deps/mochiweb/src/mochinum.erl rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochitemp.erl (85%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiutf8.erl (84%) create mode 100644 rabbitmq-server/deps/mochiweb/src/mochiweb.app.src create mode 100644 rabbitmq-server/deps/mochiweb/src/mochiweb.erl create mode 100644 rabbitmq-server/deps/mochiweb/src/mochiweb_acceptor.erl rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_base64url.erl (71%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_charref.erl (98%) create mode 100644 
rabbitmq-server/deps/mochiweb/src/mochiweb_clock.erl rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_cookies.erl (90%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_cover.erl (64%) create mode 100644 rabbitmq-server/deps/mochiweb/src/mochiweb_echo.erl rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_headers.erl (93%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_html.erl (91%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_http.erl (61%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_io.erl (50%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_mime.erl (90%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_multipart.erl (96%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_request.erl (76%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_response.erl (68%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_session.erl (69%) create mode 100644 rabbitmq-server/deps/mochiweb/src/mochiweb_socket.erl rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_socket_server.erl (76%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/mochiweb_util.erl (97%) create mode 100644 rabbitmq-server/deps/mochiweb/src/mochiweb_websocket.erl rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/src/reloader.erl (81%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp.template (92%) 
create mode 100755 rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/bench.sh rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp_skel/priv/www/index.html (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp_skel/rebar.config (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp_skel/src/mochiapp.app.src (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp_skel/src/mochiapp.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp_skel/src/mochiapp_app.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp_skel/src/mochiapp_deps.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp_skel/src/mochiapp_sup.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/templates/mochiwebapp_skel/src/mochiapp_web.erl (89%) create mode 100755 rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/start-dev.sh rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/test-materials/test_ssl_cert.pem (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/support/test-materials/test_ssl_key.pem (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/test/mochiweb_base64url_tests.erl (100%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/test/mochiweb_html_tests.erl (99%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git => deps/mochiweb}/test/mochiweb_http_tests.erl (100%) rename 
rabbitmq-server/{plugins-src/mochiweb-wrapper/mochiweb-git/src => deps/mochiweb/test}/mochiweb_request_tests.erl (100%) create mode 100644 rabbitmq-server/deps/mochiweb/test/mochiweb_socket_server_tests.erl create mode 100644 rabbitmq-server/deps/mochiweb/test/mochiweb_test_util.erl create mode 100644 rabbitmq-server/deps/mochiweb/test/mochiweb_test_util.hrl create mode 100644 rabbitmq-server/deps/mochiweb/test/mochiweb_tests.erl create mode 100644 rabbitmq-server/deps/mochiweb/test/mochiweb_websocket_tests.erl create mode 100644 rabbitmq-server/deps/rabbit_common/LICENSE rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbit_common}/LICENSE-MPL-RabbitMQ (99%) create mode 100644 rabbitmq-server/deps/rabbit_common/Makefile create mode 100644 rabbitmq-server/deps/rabbit_common/build.config rename rabbitmq-server/{ => deps/rabbit_common}/codegen.py (72%) mode change 100644 => 100755 create mode 100644 rabbitmq-server/deps/rabbit_common/erlang.mk rename rabbitmq-server/{ => deps/rabbit_common}/include/rabbit.hrl (79%) rename rabbitmq-server/{ => deps/rabbit_common}/include/rabbit_msg_store.hrl (100%) create mode 100644 rabbitmq-server/deps/rabbit_common/mk/rabbitmq-components.mk create mode 100644 rabbitmq-server/deps/rabbit_common/mk/rabbitmq-dist.mk create mode 100644 rabbitmq-server/deps/rabbit_common/mk/rabbitmq-plugin.mk create mode 100644 rabbitmq-server/deps/rabbit_common/mk/rabbitmq-run.mk create mode 100644 rabbitmq-server/deps/rabbit_common/mk/rabbitmq-tests.mk create mode 100644 rabbitmq-server/deps/rabbit_common/mk/rabbitmq-tools.mk rename rabbitmq-server/{ => deps/rabbit_common}/src/app_utils.erl (98%) rename rabbitmq-server/{ => deps/rabbit_common}/src/credit_flow.erl (81%) rename rabbitmq-server/{ => deps/rabbit_common}/src/gen_server2.erl (98%) rename rabbitmq-server/{ => deps/rabbit_common}/src/mirrored_supervisor.erl (98%) rename rabbitmq-server/{ => deps/rabbit_common}/src/mochijson2.erl (100%) rename rabbitmq-server/{ => 
deps/rabbit_common}/src/pmon.erl (99%) rename rabbitmq-server/{ => deps/rabbit_common}/src/priority_queue.erl (99%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_amqqueue.erl (90%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_auth_mechanism.erl (96%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_authn_backend.erl (95%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_authz_backend.erl (97%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_backing_queue.erl (90%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_basic.erl (97%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_binary_generator.erl (89%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_binary_parser.erl (93%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_channel.erl (93%) create mode 100644 rabbitmq-server/deps/rabbit_common/src/rabbit_channel_interceptor.erl rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_command_assembler.erl (98%) create mode 100644 rabbitmq-server/deps/rabbit_common/src/rabbit_common.app.src create mode 100644 rabbitmq-server/deps/rabbit_common/src/rabbit_control_misc.erl create mode 100644 rabbitmq-server/deps/rabbit_common/src/rabbit_data_coercion.erl rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_event.erl (96%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_exchange_decorator.erl (98%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_exchange_type.erl (97%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_heartbeat.erl (99%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_misc.erl (93%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_msg_store_index.erl (96%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_net.erl (79%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_networking.erl (79%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_nodes.erl (96%) create mode 100644 
rabbitmq-server/deps/rabbit_common/src/rabbit_password_hashing.erl rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_policy_validator.erl (94%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_queue_collector.erl (94%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_queue_decorator.erl (94%) create mode 100644 rabbitmq-server/deps/rabbit_common/src/rabbit_queue_master_locator.erl rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_reader.erl (81%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_runtime_parameter.erl (95%) rename rabbitmq-server/{ => deps/rabbit_common}/src/rabbit_writer.erl (90%) rename rabbitmq-server/{ => deps/rabbit_common}/src/ssl_compat.erl (97%) rename rabbitmq-server/{ => deps/rabbit_common}/src/supervisor2.erl (98%) rename rabbitmq-server/{ => deps/rabbit_common}/src/time_compat.erl (100%) rename rabbitmq-server/{plugins-src/eldap-wrapper => deps/rabbitmq_amqp1_0}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_amqp1_0/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/README.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_amqp1_0/build.config rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/codegen.py (72%) create mode 100644 rabbitmq-server/deps/rabbitmq_amqp1_0/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/include/rabbit_amqp1_0.hrl (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_amqp1_0/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/spec/messaging.xml (100%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/spec/security.xml (100%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/spec/transactions.xml (100%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/spec/transport.xml (100%) rename 
rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/spec/types.xml (100%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_binary_generator.erl (81%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_binary_parser.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_channel.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_framing.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_incoming_link.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_link_util.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_message.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_outgoing_link.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_reader.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_session.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_session_process.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_session_sup.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_session_sup_sup.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_util.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbit_amqp1_0_writer.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/src/rabbitmq_amqp1_0.app.src (93%) rename 
rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/test/lib-java/junit.jar (100%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/test/proton/Makefile (100%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/test/proton/build.xml (100%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/test/proton/test/com/rabbitmq/amqp1_0/tests/proton/ProtonTests.java (100%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/test/src/rabbit_amqp1_0_test.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/test/swiftmq/Makefile (78%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/test/swiftmq/build.xml (100%) create mode 100755 rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/run-tests.sh rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_amqp1_0}/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java (90%) rename rabbitmq-server/{plugins-src/mochiweb-wrapper => deps/rabbitmq_auth_backend_ldap}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_auth_backend_ldap/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README-authorisation.md rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap/README-tests => deps/rabbitmq_auth_backend_ldap/README-tests.md} (53%) create mode 100644 rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README.md create mode 100644 rabbitmq-server/deps/rabbitmq_auth_backend_ldap/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_auth_backend_ldap/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/etc/rabbit-test.config (100%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/example/README (100%) rename 
rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/example/global.ldif (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/groups.ldif rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/example/people.ldif (56%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/example/rabbit.ldif (100%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap/example/setup.sh => deps/rabbitmq_auth_backend_ldap/example/seed.sh} (53%) create mode 100755 rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/setup.sh create mode 100644 rabbitmq-server/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/src/rabbit_auth_backend_ldap.erl (90%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/src/rabbit_auth_backend_ldap_app.erl (78%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/src/rabbit_auth_backend_ldap_util.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/src/rabbitmq_auth_backend_ldap.app.src (89%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/test/src/rabbit_auth_backend_ldap_test.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_auth_backend_ldap}/test/src/rabbit_auth_backend_ldap_unit_test.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-amqp1.0 => deps/rabbitmq_auth_mechanism_ssl}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-auth-mechanism-ssl/README => deps/rabbitmq_auth_mechanism_ssl/README.md} (75%) create mode 100644 
rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/erlang.mk create mode 100644 rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-auth-mechanism-ssl => deps/rabbitmq_auth_mechanism_ssl}/src/rabbit_auth_mechanism_ssl.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-mechanism-ssl => deps/rabbitmq_auth_mechanism_ssl}/src/rabbit_auth_mechanism_ssl_app.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-mechanism-ssl => deps/rabbitmq_auth_mechanism_ssl}/src/rabbitmq_auth_mechanism_ssl.app.src (93%) rename rabbitmq-server/{plugins-src/rabbitmq-auth-backend-ldap => deps/rabbitmq_codegen}/CONTRIBUTING.md (100%) rename rabbitmq-server/{codegen => deps/rabbitmq_codegen}/LICENSE (100%) rename rabbitmq-server/{plugins-src/licensing => deps/rabbitmq_codegen}/LICENSE-MPL-RabbitMQ (100%) rename rabbitmq-server/{codegen => deps/rabbitmq_codegen}/Makefile (64%) rename rabbitmq-server/{codegen => deps/rabbitmq_codegen}/README.extensions.md (100%) rename rabbitmq-server/{codegen => deps/rabbitmq_codegen}/amqp-rabbitmq-0.8.json (99%) rename rabbitmq-server/{codegen => deps/rabbitmq_codegen}/amqp-rabbitmq-0.9.1.json (99%) rename rabbitmq-server/{codegen => deps/rabbitmq_codegen}/amqp_codegen.py (93%) rename rabbitmq-server/{codegen => deps/rabbitmq_codegen}/credit_extension.json (97%) rename rabbitmq-server/{codegen => deps/rabbitmq_codegen}/demo_extension.json (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_codegen/license_info rename rabbitmq-server/{plugins-src/rabbitmq-auth-mechanism-ssl => deps/rabbitmq_consistent_hash_exchange}/CONTRIBUTING.md (100%) rename rabbitmq-server/{plugins-src/rabbitmq-consistent-hash-exchange => deps/rabbitmq_consistent_hash_exchange}/LICENSE (100%) rename rabbitmq-server/{plugins-src/rabbitmq-consistent-hash-exchange => deps/rabbitmq_consistent_hash_exchange}/LICENSE-MPL-RabbitMQ (99%) create mode 100644 
rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-consistent-hash-exchange => deps/rabbitmq_consistent_hash_exchange}/README.md (58%) create mode 100644 rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/erlang.mk create mode 100644 rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-consistent-hash-exchange => deps/rabbitmq_consistent_hash_exchange}/src/rabbit_exchange_type_consistent_hash.erl (74%) rename rabbitmq-server/{plugins-src/rabbitmq-consistent-hash-exchange => deps/rabbitmq_consistent_hash_exchange}/src/rabbitmq_consistent_hash_exchange.app.src (89%) rename rabbitmq-server/{plugins-src/rabbitmq-consistent-hash-exchange => deps/rabbitmq_consistent_hash_exchange}/test/src/rabbit_exchange_type_consistent_hash_test.erl (62%) create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/LICENSE create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/LICENSE-MPL-RabbitMQ create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/README.md create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/erlang.mk create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/examples/java/QueueEvents.java create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/rabbitmq-components.mk create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/src/rabbitmq_event_exchange.app.src create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_test.erl create mode 100644 rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_test_all.erl create mode 100644 
rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_unit_test.erl rename rabbitmq-server/{plugins-src/rabbitmq-consistent-hash-exchange => deps/rabbitmq_federation}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_federation/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/README-hacking (100%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/README.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_federation/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_federation/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/etc/rabbit-test.sh (100%) create mode 100755 rabbitmq-server/deps/rabbitmq_federation/etc/setup-rabbit-test.sh rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/include/rabbit_federation.hrl (95%) create mode 100644 rabbitmq-server/deps/rabbitmq_federation/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_app.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_db.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_event.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_exchange.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_exchange_link.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_exchange_link_sup_sup.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_link_sup.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => 
deps/rabbitmq_federation}/src/rabbit_federation_link_util.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_parameters.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_queue.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_queue_link.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_queue_link_sup_sup.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_status.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_sup.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_upstream.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_upstream_exchange.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbit_federation_util.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/src/rabbitmq_federation.app.src (92%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/test/src/rabbit_federation_exchange_test.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/test/src/rabbit_federation_queue_test.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/test/src/rabbit_federation_test_util.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_federation}/test/src/rabbit_federation_unit_test.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-erlang-client => deps/rabbitmq_federation_management}/CONTRIBUTING.md (100%) create 
mode 100644 rabbitmq-server/deps/rabbitmq_federation_management/LICENSE rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_federation_management}/LICENSE-APACHE2-ExplorerCanvas (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_federation_management}/LICENSE-BSD-base64js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_federation_management}/LICENSE-MIT-EJS10 (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_federation_management}/LICENSE-MIT-Flot (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_federation_management}/LICENSE-MIT-Sammy060 (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_federation_management}/LICENSE-MIT-jQuery164 (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MPL-RabbitMQ create mode 100644 rabbitmq-server/deps/rabbitmq_federation_management/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_federation_management/README.md create mode 100644 rabbitmq-server/deps/rabbitmq_federation_management/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-federation-management => deps/rabbitmq_federation_management}/priv/www/js/federation.js (90%) rename rabbitmq-server/{plugins-src/rabbitmq-federation-management => deps/rabbitmq_federation_management}/priv/www/js/tmpl/federation-upstream.ejs (72%) rename rabbitmq-server/{plugins-src/rabbitmq-federation-management => deps/rabbitmq_federation_management}/priv/www/js/tmpl/federation-upstreams.ejs (81%) rename rabbitmq-server/{plugins-src/rabbitmq-federation-management => deps/rabbitmq_federation_management}/priv/www/js/tmpl/federation.ejs (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_federation_management/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-federation-management => deps/rabbitmq_federation_management}/src/rabbit_federation_mgmt.erl (98%) rename 
rabbitmq-server/{plugins-src/rabbitmq-federation-management => deps/rabbitmq_federation_management}/src/rabbitmq_federation_management.app.src (91%) rename rabbitmq-server/{plugins-src/rabbitmq-federation-management => deps/rabbitmq_management}/CONTRIBUTING.md (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/LICENSE (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/LICENSE-Apache-Basho => deps/rabbitmq_management/LICENSE-APACHE2-ExplorerCanvas} (89%) create mode 100644 rabbitmq-server/deps/rabbitmq_management/LICENSE-BSD-base64js rename rabbitmq-server/{LICENSE-MIT-eldap => deps/rabbitmq_management/LICENSE-MIT-EJS10} (94%) create mode 100644 rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-Flot create mode 100644 rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-Sammy060 create mode 100644 rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-jQuery164 create mode 100644 rabbitmq-server/deps/rabbitmq_management/LICENSE-MPL-RabbitMQ create mode 100644 rabbitmq-server/deps/rabbitmq_management/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_management/README.md rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/bin/rabbitmqadmin (92%) create mode 100644 rabbitmq-server/deps/rabbitmq_management/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_management/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/etc/bunny.config (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/etc/hare.config (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/etc/rabbit-test.config (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/include/rabbit_mgmt.hrl (79%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/include/rabbit_mgmt_test.hrl (100%) rename 
rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/license_info (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/api/index.html (93%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/cli/index.html (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/css/evil.css (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/css/main.css (99%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/doc/stats.html (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/favicon.ico (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/img/bg-binary.png (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/img/bg-green-dark.png (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/img/bg-red-dark.png (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/img/bg-red.png (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/img/bg-yellow-dark.png (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/img/collapse.png (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/img/expand.png (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/img/rabbitmqlogo.png (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/index.html (93%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/base64.js (100%) rename 
rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/charts.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/dispatcher.js (93%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/ejs.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/ejs.min.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/excanvas.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/excanvas.min.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/formatters.js (83%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/global.js (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/help.js (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/jquery-1.6.4.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/jquery-1.6.4.min.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/jquery.flot.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/jquery.flot.min.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/jquery.flot.time.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/jquery.flot.time.min.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/json2.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/main.js (85%) rename 
rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/prefs.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.js => deps/rabbitmq_management/priv/www/js/sammy.js} (62%) mode change 100644 => 100755 create mode 100755 rabbitmq-server/deps/rabbitmq_management/priv/www/js/sammy.min.js rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/404.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/add-binding.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/binary.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/bindings.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/channel.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/channels-list.ejs (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_management/priv/www/js/tmpl/channels.ejs rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/cluster-name.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/columns-options.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/connection.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/connections.ejs (95%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/consumers.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/error-popup.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => 
deps/rabbitmq_management}/priv/www/js/tmpl/exchange.ejs (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/exchanges.ejs (95%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/import-succeeded.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/layout.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/login.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/memory-bar.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/memory-table.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/memory.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/messages.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/msg-detail-deliveries.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/msg-detail-publishes.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/node.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/overview.ejs (88%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/partition.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/paths.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/permissions.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => 
deps/rabbitmq_management}/priv/www/js/tmpl/policies.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/policy.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/publish.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/queue.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/queues.ejs (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/rate-options.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/registry.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/status.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/user.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/users.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/vhost.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/priv/www/js/tmpl/vhosts.ejs (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_management/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_app.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_db.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_dispatcher.erl (91%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_extension.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management 
=> deps/rabbitmq_management}/src/rabbit_mgmt_format.erl (87%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_load_definitions.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_reset_handler.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_stats.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_sup.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_sup_sup.erl (84%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_util.erl (75%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_aliveness_test.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_binding.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_bindings.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_channel.erl (84%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_channels.erl (81%) create mode 100644 rabbitmq-server/deps/rabbitmq_management/src/rabbit_mgmt_wm_channels_vhost.erl rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_cluster_name.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_connection.erl (88%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_connection_channels.erl (84%) rename rabbitmq-server/{plugins-src/rabbitmq-management => 
deps/rabbitmq_management}/src/rabbit_mgmt_wm_connections.erl (81%) create mode 100644 rabbitmq-server/deps/rabbitmq_management/src/rabbit_mgmt_wm_connections_vhost.erl rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_consumers.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_definitions.erl (68%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_exchange.erl (87%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_exchange_publish.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_exchanges.erl (84%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_extensions.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_node.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_nodes.erl (85%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_overview.erl (74%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_parameter.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_parameters.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_permission.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_permissions.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_permissions_user.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management => 
deps/rabbitmq_management}/src/rabbit_mgmt_wm_permissions_vhost.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_policies.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_policy.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_queue.erl (86%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_queue_actions.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_queue_get.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_queue_purge.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_queues.erl (86%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_user.erl (62%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_users.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_vhost.erl (85%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_vhosts.erl (82%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbit_mgmt_wm_whoami.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/src/rabbitmq_management.app.src (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/default-config (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/rabbit_mgmt_test_clustering.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => 
deps/rabbitmq_management}/test/src/rabbit_mgmt_test_db.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/rabbit_mgmt_test_db_unit.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/rabbit_mgmt_test_http.erl (59%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/rabbit_mgmt_test_unit.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/rabbit_mgmt_test_util.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/rabbitmqadmin-test-wrapper.sh (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/rabbitmqadmin-test.py (97%) rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_management}/test/src/test-config (100%) rename rabbitmq-server/{plugins-src/rabbitmq-federation => deps/rabbitmq_management_agent}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_management_agent/LICENSE create mode 100644 rabbitmq-server/deps/rabbitmq_management_agent/LICENSE-MPL-RabbitMQ create mode 100644 rabbitmq-server/deps/rabbitmq_management_agent/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_management_agent/erlang.mk create mode 100644 rabbitmq-server/deps/rabbitmq_management_agent/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-management-agent => deps/rabbitmq_management_agent}/src/rabbit_mgmt_agent_app.erl (92%) rename rabbitmq-server/{plugins-src/rabbitmq-management-agent => deps/rabbitmq_management_agent}/src/rabbit_mgmt_agent_sup.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-management-agent => deps/rabbitmq_management_agent}/src/rabbit_mgmt_db_handler.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-management-agent => deps/rabbitmq_management_agent}/src/rabbit_mgmt_external_stats.erl (99%) rename 
rabbitmq-server/{plugins-src/rabbitmq-management-agent => deps/rabbitmq_management_agent}/src/rabbitmq_management_agent.app.src (91%) rename rabbitmq-server/{plugins-src/rabbitmq-management-agent => deps/rabbitmq_management_visualiser}/CONTRIBUTING.md (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/LICENSE (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/LICENSE-BSD-glMatrix (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/LICENSE-MPL-RabbitMQ (99%) create mode 100644 rabbitmq-server/deps/rabbitmq_management_visualiser/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/README (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_management_visualiser/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/license_info (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/priv/www/js/visualiser.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/priv/www/visualiser/index.html (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/priv/www/visualiser/js/glMatrix-min.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/priv/www/visualiser/js/glMatrix.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/priv/www/visualiser/js/main.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/priv/www/visualiser/js/model.js (100%) rename 
rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/priv/www/visualiser/js/octtree.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/priv/www/visualiser/js/physics.js (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_management_visualiser/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/src/rabbit_mgmt_wm_all.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/src/rabbit_visualiser_mgmt.erl (92%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_management_visualiser}/src/rabbitmq_management_visualiser.app.src (90%) rename rabbitmq-server/{plugins-src/rabbitmq-management-visualiser => deps/rabbitmq_mqtt}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/README.md create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/include/rabbit_mqtt.hrl (70%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/include/rabbit_mqtt_frame.hrl (89%) create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/include/rabbit_mqtt_retained_msg_store.hrl create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/src/rabbit_mqtt.erl (68%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/src/rabbit_mqtt_collector.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/src/rabbit_mqtt_connection_sup.erl (81%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => 
deps/rabbitmq_mqtt}/src/rabbit_mqtt_frame.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/src/rabbit_mqtt_processor.erl (64%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/src/rabbit_mqtt_reader.erl (77%) create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store.erl create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_dets.erl create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/src/rabbit_mqtt_retained_msg_store_ets.erl create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer.erl create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/src/rabbit_mqtt_retainer_sup.erl rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/src/rabbit_mqtt_sup.erl (55%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/src/rabbit_mqtt_util.erl (63%) create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/src/rabbit_mqtt_vhost_event_handler.erl rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/src/rabbitmq_mqtt.app.src (55%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/Makefile (84%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/build.properties (100%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/build.xml (97%) create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/test/lib/junit.jar rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/rabbit-test.sh (100%) create mode 100755 rabbitmq-server/deps/rabbitmq_mqtt/test/setup-rabbit-test.sh rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/src/com/rabbitmq/mqtt/test/MqttTest.java (70%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/src/com/rabbitmq/mqtt/test/rabbit-test.sh (100%) create mode 100644 
rabbitmq-server/deps/rabbitmq_mqtt/test/src/com/rabbitmq/mqtt/test/setup-rabbit-test.sh rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/src/com/rabbitmq/mqtt/test/tls/MqttSSLTest.java (98%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/src/com/rabbitmq/mqtt/test/tls/MutualAuth.java (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_mqtt/test/src/rabbit_mqtt_processor_tests.erl rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/src/rabbit_mqtt_util_tests.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/src/rabbitmq_mqtt_standalone.app.src (76%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/src/test.config (100%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_mqtt}/test/test.sh (100%) rename rabbitmq-server/{plugins-src/licensing/LICENSE-MIT-eldap => deps/rabbitmq_recent_history_exchange/LICENSE.md} (94%) create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/README.md create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/erlang.mk create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/etc/rabbit-hare.config create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/etc/rabbit-test.config create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/include/rabbit_recent_history.hrl create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/rabbitmq-components.mk create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/src/rabbit_exchange_type_recent_history.erl create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/src/rabbitmq_recent_history_exchange.app.src create mode 100644 
rabbitmq-server/deps/rabbitmq_recent_history_exchange/test/src/rabbit_exchange_type_recent_history_test.erl create mode 100644 rabbitmq-server/deps/rabbitmq_recent_history_exchange/test/src/rabbit_exchange_type_recent_history_test_util.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/LICENSE create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/LICENSE-MPL-RabbitMQ create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/LICENSE-MPL2 create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/README.extra.md create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/README.md create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/docs/sharded_queues.png create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/erlang.mk create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/etc/rabbit-hare.config create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/etc/rabbit-test.config create mode 100755 rabbitmq-server/deps/rabbitmq_sharding/etc/rkey.sh create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/other_plugins create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/rabbitmq-components.mk create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_decorator.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/src/rabbit_sharding_exchange_type_modulus_hash.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/src/rabbit_sharding_interceptor.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/src/rabbit_sharding_policy_validator.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/src/rabbit_sharding_shard.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/src/rabbit_sharding_util.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/src/rabbitmq_sharding.app.src create mode 100644 
rabbitmq-server/deps/rabbitmq_sharding/test/src/rabbit_hash_exchange_test.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/test/src/rabbit_sharding_test.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/test/src/rabbit_sharding_test_all.erl create mode 100644 rabbitmq-server/deps/rabbitmq_sharding/test/src/rabbit_sharding_test_util.erl rename rabbitmq-server/{plugins-src/rabbitmq-management => deps/rabbitmq_shovel}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_shovel/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_shovel/README.md create mode 100644 rabbitmq-server/deps/rabbitmq_shovel/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_shovel/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/include/rabbit_shovel.hrl (93%) create mode 100644 rabbitmq-server/deps/rabbitmq_shovel/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel.erl (92%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel_config.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel_dyn_worker_sup.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel_dyn_worker_sup_sup.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel_parameters.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel_status.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel_sup.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel_util.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbit_shovel_worker.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => 
deps/rabbitmq_shovel}/src/rabbit_shovel_worker_sup.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/src/rabbitmq_shovel.app.src (95%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/test/src/rabbit_shovel_test.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/test/src/rabbit_shovel_test_all.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_shovel}/test/src/rabbit_shovel_test_dyn.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-mqtt => deps/rabbitmq_shovel_management}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_shovel_management/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/README (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_shovel_management/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_shovel_management/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/etc/rabbit-test.config (100%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/priv/www/js/shovel.js (100%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/priv/www/js/tmpl/dynamic-shovel.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/priv/www/js/tmpl/dynamic-shovels.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/priv/www/js/tmpl/shovels.ejs (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_shovel_management/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/src/rabbit_shovel_mgmt.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => 
deps/rabbitmq_shovel_management}/src/rabbitmq_shovel_management.app.src (90%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/test/src/rabbit_shovel_mgmt_test_all.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_shovel_management}/test/src/rabbit_shovel_mgmt_test_http.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-shovel-management => deps/rabbitmq_stomp}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_stomp/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/NOTES (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/README.md (70%) create mode 100644 rabbitmq-server/deps/rabbitmq_stomp/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_stomp/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/perl/rabbitmq_stomp_recv.pl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/perl/rabbitmq_stomp_rpc_client.pl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/perl/rabbitmq_stomp_rpc_service.pl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/perl/rabbitmq_stomp_send.pl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/perl/rabbitmq_stomp_send_many.pl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/perl/rabbitmq_stomp_slow_recv.pl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/ruby/cb-receiver.rb (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/ruby/cb-sender.rb (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/ruby/cb-slow-receiver.rb (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => 
deps/rabbitmq_stomp}/examples/ruby/persistent-receiver.rb (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/ruby/persistent-sender.rb (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/ruby/topic-broadcast-receiver.rb (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/ruby/topic-broadcast-with-unsubscribe.rb (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/examples/ruby/topic-sender.rb (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/include/rabbit_stomp.hrl (92%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/include/rabbit_stomp_frame.hrl (91%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/include/rabbit_stomp_headers.hrl (87%) create mode 100644 rabbitmq-server/deps/rabbitmq_stomp/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/src/rabbit_stomp.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/src/rabbit_stomp_client_sup.erl (70%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/src/rabbit_stomp_frame.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/src/rabbit_stomp_processor.erl (80%) create mode 100644 rabbitmq-server/deps/rabbitmq_stomp/src/rabbit_stomp_reader.erl rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/src/rabbit_stomp_sup.erl (51%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/src/rabbit_stomp_util.erl (85%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/src/rabbitmq_stomp.app.src (73%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/ack.py (97%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/base.py (94%) 
rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/connect_options.py (90%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/destinations.py (95%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/errors.py (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/lifecycle.py (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/parsing.py (97%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/queue_properties.py (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/rabbit_stomp_amqqueue_test.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/rabbit_stomp_client.erl (89%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/rabbit_stomp_publish_test.erl (76%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/rabbit_stomp_test.erl (68%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/rabbit_stomp_test_frame.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/rabbit_stomp_test_util.erl (99%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/redelivered.py (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/reliability.py (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/ssl_lifecycle.py (88%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/test.config (83%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/test.py (93%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/test_connect_options.py (100%) rename 
rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/test_runner.py (66%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/test_ssl.py (100%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/test_util.py (88%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_stomp}/test/src/transactions.py (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_stomp/test/src/x_queue_name.py rename rabbitmq-server/{plugins-src/rabbitmq-shovel => deps/rabbitmq_tracing}/CONTRIBUTING.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_tracing/Makefile create mode 100644 rabbitmq-server/deps/rabbitmq_tracing/README.md create mode 100644 rabbitmq-server/deps/rabbitmq_tracing/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_tracing/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/priv/www/js/tmpl/traces.ejs (100%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/priv/www/js/tracing.js (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_tracing/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_app.erl (92%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_consumer.erl (92%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_consumer_sup.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_files.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_mgmt.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_sup.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_traces.erl (98%) create mode 100644 
rabbitmq-server/deps/rabbitmq_tracing/src/rabbit_tracing_util.erl rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_wm_file.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_wm_files.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_wm_trace.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbit_tracing_wm_traces.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/src/rabbitmq_tracing.app.src (61%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_tracing}/test/src/rabbit_tracing_test.erl (88%) rename rabbitmq-server/{plugins-src/rabbitmq-stomp => deps/rabbitmq_web_dispatch}/CONTRIBUTING.md (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/LICENSE (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_dispatch/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/README.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_dispatch/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_web_dispatch/erlang.mk create mode 100644 rabbitmq-server/deps/rabbitmq_web_dispatch/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/src/rabbit_web_dispatch.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/src/rabbit_web_dispatch_app.erl (93%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/src/rabbit_web_dispatch_registry.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/src/rabbit_web_dispatch_sup.erl (98%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => 
deps/rabbitmq_web_dispatch}/src/rabbit_web_dispatch_util.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/src/rabbit_webmachine.erl (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/src/rabbit_webmachine_error_handler.erl (93%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/src/rabbitmq_web_dispatch.app.src (92%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/test/priv/www/index.html (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/test/src/rabbit_web_dispatch_test.erl (90%) rename rabbitmq-server/{plugins-src/rabbitmq-web-dispatch => deps/rabbitmq_web_dispatch}/test/src/rabbit_web_dispatch_test_unit.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-test => deps/rabbitmq_web_stomp}/CONTRIBUTING.md (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/LICENSE (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp}/LICENSE-MPL-RabbitMQ (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/README.md (98%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp/build.config create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp/erlang.mk create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/src/rabbit_ws_app.erl (92%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp/src/rabbit_ws_client.erl create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp/src/rabbit_ws_client_sup.erl create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp/src/rabbit_ws_handler.erl rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => 
deps/rabbitmq_web_stomp}/src/rabbit_ws_sockjs.erl (53%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/src/rabbit_ws_sup.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/src/rabbitmq_web_stomp.app.src (55%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/test/src/rabbit_ws_test_all.erl (85%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp/test/src/rabbit_ws_test_cowboy_websocket.erl rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/test/src/rabbit_ws_test_raw_websocket.erl (94%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/test/src/rabbit_ws_test_sockjs_websocket.erl (97%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/test/src/rfc6455_client.erl (93%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp}/test/src/stomp.erl (96%) rename rabbitmq-server/{plugins-src/rabbitmq-tracing => deps/rabbitmq_web_stomp_examples}/CONTRIBUTING.md (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/LICENSE (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/LICENSE-APL2-Stomp-Websocket (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp => deps/rabbitmq_web_stomp_examples}/LICENSE-MPL-RabbitMQ (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp_examples/Makefile rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/README.md (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp_examples/erlang.mk rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/priv/bunny.html (92%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/priv/bunny.png 
(100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/priv/echo.html (90%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/priv/index.html (62%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/priv/main.css (100%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/priv/pencil.cur (100%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp_examples/priv/stomp.js rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/priv/temp-queue.html (91%) create mode 100644 rabbitmq-server/deps/rabbitmq_web_stomp_examples/rabbitmq-components.mk rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/src/rabbit_web_stomp_examples_app.erl (95%) rename rabbitmq-server/{plugins-src/rabbitmq-web-stomp-examples => deps/rabbitmq_web_stomp_examples}/src/rabbitmq_web_stomp_examples.app.src (93%) create mode 100644 rabbitmq-server/deps/ranch/AUTHORS create mode 100644 rabbitmq-server/deps/ranch/CHANGELOG.asciidoc create mode 100644 rabbitmq-server/deps/ranch/LICENSE create mode 100644 rabbitmq-server/deps/ranch/Makefile create mode 100644 rabbitmq-server/deps/ranch/README.asciidoc create mode 100644 rabbitmq-server/deps/ranch/appveyor.yml create mode 100644 rabbitmq-server/deps/ranch/circle.yml create mode 100644 rabbitmq-server/deps/ranch/doc/src/guide/book.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/guide/embedded.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/guide/internals.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/guide/introduction.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/guide/listeners.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/guide/parsers.asciidoc create mode 100644 
rabbitmq-server/deps/ranch/doc/src/guide/protocols.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/guide/ssl_auth.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/guide/transports.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/manual/ranch.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/manual/ranch_app.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/manual/ranch_protocol.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/manual/ranch_ssl.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/manual/ranch_tcp.asciidoc create mode 100644 rabbitmq-server/deps/ranch/doc/src/manual/ranch_transport.asciidoc create mode 100644 rabbitmq-server/deps/ranch/erlang.mk create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_echo/Makefile create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_echo/README.md create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_echo/relx.config create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_echo/src/echo_protocol.erl create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_echo/src/tcp_echo.app.src create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_echo/src/tcp_echo_app.erl create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_echo/src/tcp_echo_sup.erl create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_reverse/Makefile create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_reverse/README.md create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_reverse/relx.config create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_reverse/src/reverse_protocol.erl create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_reverse/src/tcp_reverse.app.src create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_reverse/src/tcp_reverse_app.erl create mode 100644 rabbitmq-server/deps/ranch/examples/tcp_reverse/src/tcp_reverse_sup.erl create mode 100644 rabbitmq-server/deps/ranch/src/ranch.app.src create mode 
100644 rabbitmq-server/deps/ranch/src/ranch.erl create mode 100644 rabbitmq-server/deps/ranch/src/ranch_acceptor.erl create mode 100644 rabbitmq-server/deps/ranch/src/ranch_acceptors_sup.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_app.erl => deps/ranch/src/ranch_app.erl} (69%) create mode 100644 rabbitmq-server/deps/ranch/src/ranch_conns_sup.erl create mode 100644 rabbitmq-server/deps/ranch/src/ranch_listener_sup.erl create mode 100644 rabbitmq-server/deps/ranch/src/ranch_protocol.erl create mode 100644 rabbitmq-server/deps/ranch/src/ranch_server.erl create mode 100644 rabbitmq-server/deps/ranch/src/ranch_ssl.erl rename rabbitmq-server/{plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_requests_sup.erl => deps/ranch/src/ranch_sup.erl} (55%) create mode 100644 rabbitmq-server/deps/ranch/src/ranch_tcp.erl create mode 100644 rabbitmq-server/deps/ranch/src/ranch_transport.erl create mode 100644 rabbitmq-server/deps/ranch/test/acceptor_SUITE.erl create mode 100644 rabbitmq-server/deps/ranch/test/active_echo_protocol.erl create mode 100644 rabbitmq-server/deps/ranch/test/check_tcp_options.erl create mode 100644 rabbitmq-server/deps/ranch/test/cover.spec create mode 100644 rabbitmq-server/deps/ranch/test/crash_protocol.erl create mode 100644 rabbitmq-server/deps/ranch/test/echo_protocol.erl create mode 100644 rabbitmq-server/deps/ranch/test/notify_and_wait_protocol.erl create mode 100644 rabbitmq-server/deps/ranch/test/ranch_ct_hook.erl create mode 100644 rabbitmq-server/deps/ranch/test/remove_conn_and_wait_protocol.erl create mode 100644 rabbitmq-server/deps/ranch/test/sendfile_SUITE.erl create mode 100644 rabbitmq-server/deps/ranch/test/shutdown_SUITE.erl create mode 100644 rabbitmq-server/deps/ranch/test/supervisor_separate.erl create mode 100644 rabbitmq-server/deps/ranch/test/trap_exit_protocol.erl rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/COPYING (73%) rename 
rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/Changelog (100%) rename rabbitmq-server/{plugins-src/licensing/LICENSE-Apache-Basho => deps/sockjs/LICENSE-APL2-Rebar} (100%) create mode 100644 rabbitmq-server/deps/sockjs/LICENSE-EPL-OTP create mode 100644 rabbitmq-server/deps/sockjs/LICENSE-MIT-Mochiweb rename rabbitmq-server/{plugins-src/eldap-wrapper/eldap-git/LICENSE => deps/sockjs/LICENSE-MIT-SockJS} (96%) create mode 100644 rabbitmq-server/deps/sockjs/Makefile rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/Makefile => deps/sockjs/Makefile.orig.mk} (100%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/README.md (69%) create mode 100755 rabbitmq-server/deps/sockjs/examples/cowboy_echo.erl create mode 100755 rabbitmq-server/deps/sockjs/examples/cowboy_echo_authen_callback.erl rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/examples/cowboy_test_server.erl (73%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/examples/echo.html (93%) create mode 100644 rabbitmq-server/deps/sockjs/examples/echo_authen_callback.html rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/examples/multiplex/cowboy_multiplex.erl (58%) create mode 100755 rabbitmq-server/deps/sockjs/examples/multiplex/cowboy_multiplex_authen_callback.erl rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/examples/multiplex/index.html (92%) create mode 100644 rabbitmq-server/deps/sockjs/examples/multiplex/index_authen_callback.html create mode 100755 rabbitmq-server/deps/sockjs/rebar rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/rebar.config (53%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/mochijson2_fork.erl (100%) rename 
rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/mochinum_fork.erl (100%) create mode 100644 rabbitmq-server/deps/sockjs/src/sockjs.app.src create mode 100644 rabbitmq-server/deps/sockjs/src/sockjs.erl rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_action.erl (90%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_app.erl (76%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_cowboy_handler.erl (91%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_filters.erl (79%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_handler.erl (86%) create mode 100644 rabbitmq-server/deps/sockjs/src/sockjs_http.erl create mode 100644 rabbitmq-server/deps/sockjs/src/sockjs_internal.hrl rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_json.erl (76%) create mode 100644 rabbitmq-server/deps/sockjs/src/sockjs_multiplex.erl create mode 100644 rabbitmq-server/deps/sockjs/src/sockjs_multiplex_channel.erl rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_service.erl (100%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_session.erl (86%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_session_sup.erl (88%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_util.erl (88%) rename rabbitmq-server/{plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git => deps/sockjs}/src/sockjs_ws_handler.erl (88%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/Emakefile (100%) rename 
rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/LICENSE (100%) create mode 100644 rabbitmq-server/deps/webmachine/Makefile rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git/Makefile => deps/webmachine/Makefile.orig.mk} (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/README.org (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/THANKS (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/Makefile (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/README (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/priv/dispatch.conf (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/rebar.config (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/src/webmachine_demo.app.src (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/src/webmachine_demo.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/src/webmachine_demo_app.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/src/webmachine_demo_fs_resource.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/src/webmachine_demo_resource.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/src/webmachine_demo_sup.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/demo/start.sh (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/docs/http-headers-status-v3.png (100%) rename 
rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/include/webmachine.hrl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/include/webmachine_logger.hrl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/include/wm_reqdata.hrl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/include/wm_reqstate.hrl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/include/wm_resource.hrl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/Makefile (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/README (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/priv/dispatch.conf (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/rebar.config (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/src/wmskel.app.src (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/src/wmskel.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/src/wmskel_app.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/src/wmskel_resource.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/src/wmskel_sup.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/start.sh (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/templates/wmskel.template 
(100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/trace/http-headers-status-v3.png (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/trace/wmtrace.css (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/trace/wmtrace.js (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/priv/www/index.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/rebar (100%) create mode 100644 rabbitmq-server/deps/webmachine/rebar.config rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/rebar.config.script (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/scripts/new_webmachine.sh (100%) create mode 100644 rabbitmq-server/deps/webmachine/src/webmachine.app.src rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_app.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_decision_core.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_deps.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_dispatcher.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_error.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_error_handler.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_log.erl (100%) rename 
rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_log_handler.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_logger_watcher.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_logger_watcher_sup.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_mochiweb.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_multipart.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_perf_log_handler.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_request.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_resource.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_router.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_sup.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/webmachine_util.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/wmtrace_resource.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/src/wrq.erl (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/start-dev.sh (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/start.sh (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/test/etag_test.erl (100%) rename 
rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/blogs.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/contact.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/css/style-1c.css (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/css/style.css (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/debugging.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/diagram.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/dispatcher.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/docs.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/example_resources.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/favicon.ico (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/WM200-crop.png (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/basho-landscape.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/basic-trace-decision-tab.png (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/basic-trace-labeled.png (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/basic-trace-request-tab.png (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/basic-trace-response-tab.png (100%) rename 
rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/bg.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/blankbox.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/chash.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/easy-ops.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/gossip4.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/halfblankbox.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/http-headers-status-v3.png (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/more.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/site.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/splash250.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/images/vclock.gif (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/index.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/intros.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/mechanics.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/quickstart.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/reftrans.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => 
deps/webmachine}/www/reqdata.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/resources.html (100%) rename rabbitmq-server/{plugins-src/webmachine-wrapper/webmachine-git => deps/webmachine}/www/streambody.html (100%) create mode 100644 rabbitmq-server/docs/README-for-packages create mode 100644 rabbitmq-server/erlang.mk delete mode 100644 rabbitmq-server/generate_app delete mode 100644 rabbitmq-server/generate_deps create mode 100644 rabbitmq-server/git-revisions.txt delete mode 100644 rabbitmq-server/plugins-src/Makefile delete mode 100644 rabbitmq-server/plugins-src/README delete mode 100644 rabbitmq-server/plugins-src/all-packages.mk delete mode 100644 rabbitmq-server/plugins-src/common.mk delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/0001-R12-fake-iodata-type.patch delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/0002-R12-drop-all-references-to-boolean-type.patch delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/0003-R12-drop-all-references-to-reference-type.patch delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/0004-R12-drop-references-to-iodata-type.patch delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/0005-R12-drop-references-to-Default-any-type.patch delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/0007-R12-type-definitions-must-be-ordered.patch delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/0008-sec-websocket-protocol.patch delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/Makefile delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/README.md delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.done delete mode 100644 
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.travis.yml delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/AUTHORS delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/CHANGELOG.md delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/Makefile delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/README.md delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/doc/overview.edoc delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/include/http.hrl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/rebar.config delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptor.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptors_sup.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_cookies.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_dispatcher.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_handler.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_protocol.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_req.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_static.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket_handler.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener_sup.erl delete mode 100644 
rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_multipart.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_protocol.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_ssl_transport.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_tcp_transport.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/chunked_handler.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/dispatcher_prop.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/cert.pem delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/key.pem delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_init_shutdown.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_long_polling.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_multipart.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_set_resp.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_stream_body.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_SUITE.erl delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/hash.mk delete mode 100644 rabbitmq-server/plugins-src/cowboy-wrapper/package.mk delete mode 100644 rabbitmq-server/plugins-src/do-package.mk delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/Makefile delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-appify.patch delete mode 100644 
rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/.done delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/Makefile delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/README delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/README.example delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/short-desc delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/ebin/eldap.app delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/include/eldap.hrl delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/ELDAPv3.asn delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/Makefile delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/eldap.erl delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/README.test delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bill.ldif delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bluetail.ldif delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/crl.ldif delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/eldap_test.erl delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/ldap.rc delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/people.ldif delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/post_danmark.ldif delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/server1.crl delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/slapd.conf delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/tobbe.ldif delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/eldap-no-ssl-seed.patch delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/hash.mk delete mode 100644 
rabbitmq-server/plugins-src/eldap-wrapper/license_info delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/package.mk delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/remove-eldap-fsm.patch delete mode 100644 rabbitmq-server/plugins-src/eldap-wrapper/remove-ietf-doc.patch delete mode 100644 rabbitmq-server/plugins-src/generate_app delete mode 100644 rabbitmq-server/plugins-src/generate_deps delete mode 100644 rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Mochi delete mode 100644 rabbitmq-server/plugins-src/licensing/license_info_eldap-wrapper delete mode 100644 rabbitmq-server/plugins-src/licensing/license_info_mochiweb-wrapper delete mode 100644 rabbitmq-server/plugins-src/licensing/license_info_webmachine-wrapper delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/10-build-on-R12B-5.patch delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/20-MAX_RECV_BODY.patch delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/30-remove-crypto-ssl-dependencies.patch delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/40-remove-compiler-syntax_tools-dependencies.patch delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/50-remove-json.patch delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/LICENSE-MIT-Mochi delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/Makefile delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/hash.mk delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/license_info delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.done delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.travis.yml delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/CHANGES.md delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/Makefile delete mode 100755 
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar delete mode 100755 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/scripts/new_mochiweb.erl delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_std.erl delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.app.src delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.erl delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_acceptor.erl delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_echo.erl delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket.erl delete mode 100755 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/start-dev.sh delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_tests.erl delete mode 100644 rabbitmq-server/plugins-src/mochiweb-wrapper/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-amqp1.0/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-amqp1.0/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-amqp1.0/package.mk delete mode 100755 rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/run-tests.sh delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.travis.yml delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-authorisation delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/groups.ldif delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/package.mk delete mode 
100644 rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.travis.yml delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-erlang-client/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile.in delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-erlang-client/common.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-erlang-client/rabbit_common.app.in delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-erlang-client/test.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-federation-management/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-federation-management/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-federation-management/README delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-federation-management/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-federation/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-federation/Makefile delete mode 100755 rabbitmq-server/plugins-src/rabbitmq-federation/etc/setup-rabbit-test.sh delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-federation/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management-agent/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management-agent/Makefile delete mode 100644 
rabbitmq-server/plugins-src/rabbitmq-management-agent/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management-visualiser/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management-visualiser/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management-visualiser/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management/.travis.yml delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management/README delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.min.js delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channels.ejs delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-mqtt/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-mqtt/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-mqtt/README.md delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-mqtt/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-mqtt/test/lib/junit.jar delete mode 100755 rabbitmq-server/plugins-src/rabbitmq-mqtt/test/setup-rabbit-test.sh delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/setup-rabbit-test.sh delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-shovel-management/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-shovel-management/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-shovel-management/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-shovel/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-shovel/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-shovel/README delete mode 100644 
rabbitmq-server/plugins-src/rabbitmq-shovel/generate_deps delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-shovel/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-stomp/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-stomp/.travis.yml delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-stomp/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-stomp/deps/pika/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-stomp/deps/stomppy/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-stomp/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_reader.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/README delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/certs/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/certs/openssl.cnf delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/qpid_config.py delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/qpid_patch delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/rabbit_failing.txt delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/src/inet_proxy_dist.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy_manager.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_consumer.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_producer.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_configs.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_runner.erl delete mode 100644 
rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_util.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/src/rabbitmq_test.app.src delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/cluster_rename.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/clustering_management.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/crashing_queues.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/dynamic_ha.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/eager_sync.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/many_node_ha.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/partitions.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/rabbit_priority_queue_test.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/simple_ha.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-test/test/src/sync_detection.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-tracing/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-tracing/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-tracing/README delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-tracing/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-dispatch/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-dispatch/CONTRIBUTING.md delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-dispatch/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-dispatch/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/CONTRIBUTING.md delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/Makefile delete mode 100644 
rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/sockjs-0.3.js delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/stomp.js delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp/CONTRIBUTING.md delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp/Makefile delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp/package.mk delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_client.erl delete mode 100644 rabbitmq-server/plugins-src/rabbitmq-web-stomp/src/rabbit_ws_client_sup.erl delete mode 100644 rabbitmq-server/plugins-src/release.mk delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0000-remove-spec-patch.diff delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0001-a2b-b2a.diff delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0002-parameterised-modules-r16a.diff delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/0003-websocket-subprotocol delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/CONTRIBUTING.md delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/Makefile delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/generate-0000-remove-spec-patch.sh delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/hash.mk delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/package.mk delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/.done delete mode 100755 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/cowboy_echo.erl delete mode 100644 
rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/multiplex.js delete mode 100755 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/pmod_pt.erl delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs.app.src delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs.erl delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_http.erl delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_internal.hrl delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_multiplex.erl delete mode 100644 rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_multiplex_channel.erl delete mode 100644 rabbitmq-server/plugins-src/umbrella.mk delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/.srcdist_done delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/10-remove-crypto-dependency.patch delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/CONTRIBUTING.md delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/Makefile delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/hash.mk delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/license_info delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/package.mk delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/.done delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/.travis.yml delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar.config delete mode 100644 rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine.app.src create mode 100755 
rabbitmq-server/quickcheck create mode 100644 rabbitmq-server/rabbitmq-components.mk mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmq-defaults mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmq-defaults.bat mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmq-echopid.bat mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmq-env mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmq-env.bat mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmq-plugins.bat rename {debian => rabbitmq-server/scripts}/rabbitmq-script-wrapper (63%) mode change 100755 => 100644 create mode 100755 rabbitmq-server/scripts/rabbitmq-server-ha.ocf mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmq-server.bat rename debian/ocf/rabbitmq-server => rabbitmq-server/scripts/rabbitmq-server.ocf (98%) mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmq-service.bat mode change 100755 => 100644 rabbitmq-server/scripts/rabbitmqctl.bat create mode 100644 rabbitmq-server/scripts/set_rabbitmq_policy.sh rename rabbitmq-server/{ebin/rabbit_app.in => src/rabbit.app.src} (88%) create mode 100644 rabbitmq-server/src/rabbit_boot_steps.erl delete mode 100644 rabbitmq-server/src/rabbit_channel_interceptor.erl create mode 100644 rabbitmq-server/src/rabbit_exchange_parameters.erl create mode 100644 rabbitmq-server/src/rabbit_hipe.erl create mode 100644 rabbitmq-server/src/rabbit_password.erl create mode 100644 rabbitmq-server/src/rabbit_password_hashing_md5.erl create mode 100644 rabbitmq-server/src/rabbit_password_hashing_sha256.erl create mode 100644 rabbitmq-server/src/rabbit_password_hashing_sha512.erl create mode 100644 rabbitmq-server/src/rabbit_queue_location_client_local.erl create mode 100644 rabbitmq-server/src/rabbit_queue_location_min_masters.erl create mode 100644 rabbitmq-server/src/rabbit_queue_location_random.erl create mode 100644 rabbitmq-server/src/rabbit_queue_location_validator.erl create mode 100644 
rabbitmq-server/src/rabbit_queue_master_location_misc.erl create mode 100644 rabbitmq-server/src/rabbit_resource_monitor_misc.erl delete mode 100644 rabbitmq-server/src/tcp_acceptor.erl delete mode 100644 rabbitmq-server/src/tcp_acceptor_sup.erl create mode 100755 rabbitmq-server/test/temp/head_message_timestamp_tests.py create mode 100755 rabbitmq-server/test/temp/rabbitmqadmin.py delete mode 100644 rabbitmq-server/version.mk create mode 100755 tests/runtests.sh diff --git a/debian/changelog b/debian/changelog index cc4dff8..61b1bd2 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +rabbitmq-server (3.6.1-1~u14.04+mos1) mos8.0; urgency=medium + + * New upstream release. + + -- Alexey Lebedeff Tue, 16 Feb 2016 13:27:12 +0300 + rabbitmq-server (3.5.6-1~u14.04+mos4) mos8.0; urgency=medium * Backport https://github.com/rabbitmq/rabbitmq-common/pull/26 diff --git a/debian/control b/debian/control index dd61545..66cb55f 100644 --- a/debian/control +++ b/debian/control @@ -9,6 +9,7 @@ Build-Depends: debhelper (>= 9~), erlang-nox (>= 1:13.b.3), erlang-src (>= 1:13.b.3), python-simplejson, + rsync, unzip, xmlto, xsltproc, diff --git a/debian/patches/detect-stuck-queue-on-declare.diff b/debian/patches/detect-stuck-queue-on-declare.diff deleted file mode 100644 index 00c43f3..0000000 --- a/debian/patches/detect-stuck-queue-on-declare.diff +++ /dev/null @@ -1,35 +0,0 @@ -Description: Detect infinite loop in AMQP channel code - Sudden death of cluster node could result in a stuck queue process - this will result in - redeclare attempts to hang. With this patch such condition will be detected - AMQP channel will - be closed and error will be logged. And probably it could help us to discover underlying bug, by - localizing the event in time. 
-Author: Alexey Lebedeff -Origin: upstream, https://github.com/rabbitmq/rabbitmq-common/pull/26 -Bug:https://github.com/rabbitmq/rabbitmq-server/issues/349 ---- -This patch header follows DEP-3: http://dep.debian.net/deps/dep3/ ---- a/src/rabbit_amqqueue.erl -+++ b/src/rabbit_amqqueue.erl -@@ -385,6 +385,11 @@ not_found_or_absent_dirty(Name) -> - end. - - with(Name, F, E) -> -+ with(Name, F, E, 2000). -+ -+with(Name, _F, E, 0) -> -+ E(not_found_or_absent_dirty(Name)); -+with(Name, F, E, RetriesLeft) -> - case lookup(Name) of - {ok, Q = #amqqueue{state = crashed}} -> - E({absent, Q, crashed}); -@@ -397,8 +402,8 @@ with(Name, F, E) -> - %% the retry loop. - rabbit_misc:with_exit_handler( - fun () -> false = rabbit_mnesia:is_process_alive(QPid), -- timer:sleep(25), -- with(Name, F, E) -+ timer:sleep(30), -+ with(Name, F, E, RetriesLeft - 1) - end, fun () -> F(Q) end); - {error, not_found} -> - E(not_found_or_absent_dirty(Name)) diff --git a/debian/patches/fix-management-startup-after-split.diff b/debian/patches/fix-management-startup-after-split.diff deleted file mode 100644 index 252e893..0000000 --- a/debian/patches/fix-management-startup-after-split.diff +++ /dev/null @@ -1,20 +0,0 @@ -Description: Backport management plugin fix - Backport management plugin fix for bug that prevented node from starting after network split. -Author: Alexey Lebedeff -Origin: backport, https://github.com/rabbitmq/rabbitmq-management/pull/84 -Bug: https://github.com/rabbitmq/rabbitmq-management/issues/81 -Applied-Upstream: 3.5.7 ---- -This patch header follows DEP-3: http://dep.debian.net/deps/dep3/ ---- a/plugins-src/rabbitmq-management/src/rabbit_mgmt_sup_sup.erl -+++ b/plugins-src/rabbitmq-management/src/rabbit_mgmt_sup_sup.erl -@@ -57,7 +57,8 @@ start_child() -> supervisor2:start_child - %%---------------------------------------------------------------------------- - - init([]) -> -- {ok, {{one_for_one, 0, 1}, [sup()]}}. 
-+ timer:apply_after(0, ?MODULE, start_child, []), -+ {ok, {{one_for_one, 0, 1}, []}}. - - sup() -> - {rabbit_mgmt_sup, {rabbit_mgmt_sup, start_link, []}, diff --git a/debian/patches/fix-pmon-demonitor-function.diff b/debian/patches/fix-pmon-demonitor-function.diff deleted file mode 100644 index 5f114b3..0000000 --- a/debian/patches/fix-pmon-demonitor-function.diff +++ /dev/null @@ -1,19 +0,0 @@ -Description: Fix internal state corruption introduced by pmon:demonitor/2 -Author: Alexey Lebedeff -Origin: backport, https://github.com/rabbitmq/rabbitmq-common/pull/18 -Applied-Upstream: 3.5.7, 3.6.0 ---- -This patch header follows DEP-3: http://dep.debian.net/deps/dep3/ -Index: rabbitmq-server/src/pmon.erl -=================================================================== ---- rabbitmq-server.orig/src/pmon.erl -+++ rabbitmq-server/src/pmon.erl -@@ -84,7 +84,7 @@ demonitor(Item, S = #state{dict = M, mod - case dict:find(Item, M) of - {ok, MRef} -> Module:demonitor(MRef), - S#state{dict = dict:erase(Item, M)}; -- error -> M -+ error -> S - end. - - is_monitored(Item, #state{dict = M}) -> dict:is_key(Item, M). diff --git a/debian/patches/series b/debian/patches/series deleted file mode 100644 index 651a942..0000000 --- a/debian/patches/series +++ /dev/null @@ -1,3 +0,0 @@ -fix-pmon-demonitor-function.diff -fix-management-startup-after-split.diff -detect-stuck-queue-on-declare.diff diff --git a/debian/rabbitmq-env.conf b/debian/rabbitmq-env.conf deleted file mode 100644 index bebe2ab..0000000 --- a/debian/rabbitmq-env.conf +++ /dev/null @@ -1,13 +0,0 @@ -# Defaults to rabbit. This can be useful if you want to run more than one node -# per machine - RABBITMQ_NODENAME should be unique per erlang-node-and-machine -# combination. See the clustering on a single machine guide for details: -# http://www.rabbitmq.com/clustering.html#single-machine -#NODENAME=rabbit - -# By default RabbitMQ will bind to all interfaces, on IPv4 and IPv6 if -# available. 
Set this if you only want to bind to one network interface or# -# address family. -#NODE_IP_ADDRESS=127.0.0.1 - -# Defaults to 5672. -#NODE_PORT=5672 diff --git a/debian/rabbitmq-server-wait b/debian/rabbitmq-server-wait deleted file mode 100755 index cdf53e5..0000000 --- a/debian/rabbitmq-server-wait +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/sh -e -## The contents of this file are subject to the Mozilla Public License -## Version 1.1 (the "License"); you may not use this file except in -## compliance with the License. You may obtain a copy of the License -## at http://www.mozilla.org/MPL/ -## -## Software distributed under the License is distributed on an "AS IS" -## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -## the License for the specific language governing rights and -## limitations under the License. -## -## The Original Code is RabbitMQ. -## -## The Initial Developer of the Original Code is GoPivotal, Inc. -## Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -## - -# Get default settings with user overrides for (RABBITMQ_) -# Non-empty defaults should be set in rabbitmq-env -. 
`dirname $0`/rabbitmq-env - -/usr/lib/rabbitmq/bin/rabbitmqctl wait $RABBITMQ_PID_FILE diff --git a/debian/rabbitmq-server.init b/debian/rabbitmq-server.init index b2d3f86..fce2d16 100644 --- a/debian/rabbitmq-server.init +++ b/debian/rabbitmq-server.init @@ -3,7 +3,7 @@ # rabbitmq-server RabbitMQ broker # # chkconfig: - 80 05 -# description: Enable AMQP service provided by RabbitMQ +# description: Manages RabbitMQ server # ### BEGIN INIT INFO @@ -13,7 +13,7 @@ # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Description: RabbitMQ broker -# Short-Description: Enable AMQP service provided by RabbitMQ broker +# Short-Description: Manages RabbitMQ server ### END INIT INFO PATH=/sbin:/usr/sbin:/bin:/usr/bin @@ -23,9 +23,8 @@ CONTROL=/usr/sbin/rabbitmqctl DESC="message broker" USER=rabbitmq ROTATE_SUFFIX= -INIT_LOG_DIR=/var/log/rabbitmq PID_FILE=/var/run/rabbitmq/pid - +RABBITMQ_ENV=/usr/lib/rabbitmq/bin/rabbitmq-env test -x $DAEMON || exit 0 test -x $CONTROL || exit 0 @@ -35,6 +34,9 @@ set -e [ -f /etc/default/${NAME} ] && . /etc/default/${NAME} +RABBITMQ_SCRIPTS_DIR=$(dirname "$RABBITMQ_ENV") +. "$RABBITMQ_ENV" + . /lib/lsb/init-functions . /lib/init/vars.sh @@ -60,7 +62,7 @@ start_rabbitmq () { set +e RABBITMQ_PID_FILE=$PID_FILE start-stop-daemon --quiet \ --chuid rabbitmq --start --exec $DAEMON \ - --pidfile "$RABBITMQ_PID_FILE" --background + --pidfile "$PID_FILE" --background $CONTROL wait $PID_FILE >/dev/null 2>&1 RETVAL=$? set -e @@ -76,7 +78,9 @@ stop_rabbitmq () { status_rabbitmq quiet if [ $RETVAL = 0 ] ; then set +e - $CONTROL stop ${PID_FILE} > ${INIT_LOG_DIR}/shutdown_log 2> ${INIT_LOG_DIR}/shutdown_err + $CONTROL stop ${PID_FILE} \ + > ${RABBITMQ_LOG_BASE}/shutdown_log \ + 2> ${RABBITMQ_LOG_BASE}/shutdown_err RETVAL=$? 
set -e if [ $RETVAL = 0 ] ; then @@ -143,7 +147,7 @@ start_stop_end() { RETVAL=0 ;; *) - log_warning_msg "FAILED - check ${INIT_LOG_DIR}/startup_\{log, _err\}" + log_warning_msg "FAILED - check ${RABBITMQ_LOG_BASE}/startup_\{log, _err\}" log_end_msg 1 ;; esac diff --git a/debian/rabbitmq-server.install b/debian/rabbitmq-server.install deleted file mode 100644 index 902f3dd..0000000 --- a/debian/rabbitmq-server.install +++ /dev/null @@ -1,4 +0,0 @@ -debian/ocf/rabbitmq-server /usr/lib/ocf/resource.d/rabbitmq/ -debian/rabbitmq-server-wait /usr/lib/rabbitmq/bin -debian/rabbitmq-script-wrapper /usr/lib/rabbitmq/bin -debian/rabbitmq-env.conf /etc/rabbitmq diff --git a/debian/rabbitmq-server.links b/debian/rabbitmq-server.links deleted file mode 100644 index 0bfa1c5..0000000 --- a/debian/rabbitmq-server.links +++ /dev/null @@ -1,3 +0,0 @@ -/usr/lib/rabbitmq/bin/rabbitmq-script-wrapper /usr/sbin/rabbitmqctl -/usr/lib/rabbitmq/bin/rabbitmq-script-wrapper /usr/sbin/rabbitmq-server -/usr/lib/rabbitmq/bin/rabbitmq-script-wrapper /usr/sbin/rabbitmq-plugins diff --git a/debian/rabbitmq-server.manpages b/debian/rabbitmq-server.manpages new file mode 100644 index 0000000..e0220b4 --- /dev/null +++ b/debian/rabbitmq-server.manpages @@ -0,0 +1,4 @@ +docs/rabbitmq-env.conf.5 +docs/rabbitmq-plugins.1 +docs/rabbitmq-server.1 +docs/rabbitmqctl.1 diff --git a/debian/rabbitmq-server.service b/debian/rabbitmq-server.service index faa73c1..2d6d389 100644 --- a/debian/rabbitmq-server.service +++ b/debian/rabbitmq-server.service @@ -3,12 +3,12 @@ Description=RabbitMQ Messaging Server After=network.target [Service] -Type=simple +Type=notify +NotifyAccess=all User=rabbitmq SyslogIdentifier=rabbitmq LimitNOFILE=65536 ExecStart=/usr/sbin/rabbitmq-server -ExecStartPost=/usr/lib/rabbitmq/bin/rabbitmq-server-wait ExecStop=/usr/sbin/rabbitmqctl stop [Install] diff --git a/debian/rules b/debian/rules index bee4e81..9ec4287 100755 --- a/debian/rules +++ b/debian/rules @@ -1,7 +1,12 @@ #!/usr/bin/make 
-f # -*- makefile -*- + +# Uncomment this to turn on verbose mode. #export DH_VERBOSE=1 +DEB_DESTDIR = debian/rabbitmq-server +VERSION = $(shell dpkg-parsechangelog | awk '/^Version:/ {version=$$0; sub(/Version: /, "", version); sub(/-.*/, "", version); print version;}') + %: dh $@ --parallel --with systemd @@ -14,18 +19,48 @@ override_dh_systemd_start: override_dh_installinit: dh_installinit --noscripts +override_dh_auto_clean: + $(MAKE) clean distclean-manpages + +override_dh_auto_build: + $(MAKE) dist manpages + +override_dh_auto_test: + @: -DEB_UPSTREAM_VERSION=$(shell dpkg-parsechangelog | sed -rne 's,^Version: ([^+]+)-.*,\1,p') -DEB_DESTDIR=debian/rabbitmq-server -RABBIT_LIB=$(DEB_DESTDIR)/usr/lib/rabbitmq/lib/rabbitmq_server-$(DEB_UPSTREAM_VERSION) -RABBIT_BIN=$(DEB_DESTDIR)/usr/lib/rabbitmq/bin -DOCDIR=$(DEB_DESTDIR)/usr/share/doc/rabbitmq-server +export PREFIX RMQ_ROOTDIR +override_dh_auto_install: PREFIX = /usr +override_dh_auto_install: RMQ_ROOTDIR = $(PREFIX)/lib/rabbitmq +override_dh_auto_install: RMQ_ERLAPP_DIR = $(RMQ_ROOTDIR)/lib/rabbitmq_server-$(VERSION) override_dh_auto_install: - dh_auto_install -- TARGET_DIR=$(RABBIT_LIB) SBIN_DIR=$(RABBIT_BIN) \ - DOC_INSTALL_DIR=$(DOCDIR) MAN_DIR=$(DEB_DESTDIR)/usr/share/man - rm -f $(RABBIT_LIB)/LICENSE* $(RABBIT_LIB)/INSTALL* + dh_auto_install -override_dh_auto_clean: - rm -f plugins-src/rabbitmq-server plugins/README - dh_auto_clean + $(MAKE) install-bin DESTDIR=$(DEB_DESTDIR) + + mkdir -p $(DEB_DESTDIR)$(PREFIX)/sbin + sed -e 's|@SU_RABBITMQ_SH_C@|su rabbitmq -s /bin/sh -c|' \ + -e 's|@STDOUT_STDERR_REDIRECTION@|> "$$RABBITMQ_LOG_BASE/startup_log" 2> "$$RABBITMQ_LOG_BASE/startup_err"|' \ + < scripts/rabbitmq-script-wrapper \ + > $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl + chmod 0755 $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl + for script in rabbitmq-server rabbitmq-plugins; do \ + cp -a $(DEB_DESTDIR)$(PREFIX)/sbin/rabbitmqctl \ + $(DEB_DESTDIR)$(PREFIX)/sbin/$$script; \ + done + + install -p -D -m 0644 
debian/rabbitmq-server.default \ + $(DEB_DESTDIR)/etc/default/rabbitmq-server + + install -p -D -m 0755 scripts/rabbitmq-server.ocf \ + $(DEB_DESTDIR)$(PREFIX)/lib/ocf/resource.d/rabbitmq/rabbitmq-server + install -p -D -m 0755 scripts/rabbitmq-server-ha.ocf \ + $(DEB_DESTDIR)$(PREFIX)/lib/ocf/resource.d/rabbitmq/rabbitmq-server-ha + install -p -D -m 0644 scripts/set_rabbitmq_policy.sh \ + $(DEB_DESTDIR)$(PREFIX)/lib/ocf/resource.d/rabbitmq/set_rabbitmq_policy.sh.example + + rm $(DEB_DESTDIR)$(RMQ_ERLAPP_DIR)/LICENSE* \ + $(DEB_DESTDIR)$(RMQ_ERLAPP_DIR)/INSTALL + + rmdir $(DEB_DESTDIR)$(PREFIX)/lib/erlang/lib \ + $(DEB_DESTDIR)$(PREFIX)/lib/erlang diff --git a/rabbitmq-server/codegen/CONTRIBUTING.md b/rabbitmq-server/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/codegen/CONTRIBUTING.md rename to rabbitmq-server/CONTRIBUTING.md diff --git a/rabbitmq-server/LICENSE b/rabbitmq-server/LICENSE index 9deeb23..1834aa5 100644 --- a/rabbitmq-server/LICENSE +++ b/rabbitmq-server/LICENSE @@ -4,7 +4,7 @@ If you have any questions regarding licensing, please contact us at info@rabbitmq.com. The files amqp-rabbitmq-0.8.json and amqp-rabbitmq-0.9.1.json are -"Copyright (C) 2008-2013 GoPivotal", Inc. and are covered by the MIT +"Copyright (C) 2008-2016 Pivotal Software, Inc", Inc. and are covered by the MIT license. jQuery is "Copyright (c) 2010 John Resig" and is covered by the MIT @@ -24,16 +24,6 @@ http://code.google.com/p/explorercanvas/ Flot is "Copyright (c) 2007-2013 IOLA and Ole Laursen" and is covered by the MIT license. It was downloaded from http://www.flotcharts.org/ -Webmachine is Copyright (c) Basho Technologies and is covered by the -Apache License 2.0. It was downloaded from http://webmachine.basho.com/ - -Eldap is "Copyright (c) 2010, Torbjorn Tornkvist" and is covered by -the MIT license. It was downloaded from https://github.com/etnt/eldap - -Mochiweb is "Copyright (c) 2007 Mochi Media, Inc." and is covered by -the MIT license. 
It was downloaded from -http://github.com/mochi/mochiweb/ - glMatrix is "Copyright (c) 2011, Brandon Jones" and is covered by the BSD 2-Clause license. It was downloaded from http://code.google.com/p/glmatrix/ diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-APL2-Rebar b/rabbitmq-server/LICENSE-APL2-Rebar similarity index 100% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-APL2-Rebar rename to rabbitmq-server/LICENSE-APL2-Rebar diff --git a/rabbitmq-server/LICENSE-EPL-OTP b/rabbitmq-server/LICENSE-EPL-OTP new file mode 100644 index 0000000..2257751 --- /dev/null +++ b/rabbitmq-server/LICENSE-EPL-OTP @@ -0,0 +1,286 @@ +ERLANG PUBLIC LICENSE +Version 1.1 + +1. Definitions. + +1.1. ``Contributor'' means each entity that creates or contributes to +the creation of Modifications. + +1.2. ``Contributor Version'' means the combination of the Original +Code, prior Modifications used by a Contributor, and the Modifications +made by that particular Contributor. + +1.3. ``Covered Code'' means the Original Code or Modifications or the +combination of the Original Code and Modifications, in each case +including portions thereof. + +1.4. ``Electronic Distribution Mechanism'' means a mechanism generally +accepted in the software development community for the electronic +transfer of data. + +1.5. ``Executable'' means Covered Code in any form other than Source +Code. + +1.6. ``Initial Developer'' means the individual or entity identified +as the Initial Developer in the Source Code notice required by Exhibit +A. + +1.7. ``Larger Work'' means a work which combines Covered Code or +portions thereof with code not governed by the terms of this License. + +1.8. ``License'' means this document. + +1.9. ``Modifications'' means any addition to or deletion from the +substance or structure of either the Original Code or any previous +Modifications. 
When Covered Code is released as a series of files, a +Modification is: + +A. Any addition to or deletion from the contents of a file containing + Original Code or previous Modifications. + +B. Any new file that contains any part of the Original Code or + previous Modifications. + +1.10. ``Original Code'' means Source Code of computer software code +which is described in the Source Code notice required by Exhibit A as +Original Code, and which, at the time of its release under this +License is not already Covered Code governed by this License. + +1.11. ``Source Code'' means the preferred form of the Covered Code for +making modifications to it, including all modules it contains, plus +any associated interface definition files, scripts used to control +compilation and installation of an Executable, or a list of source +code differential comparisons against either the Original Code or +another well known, available Covered Code of the Contributor's +choice. The Source Code can be in a compressed or archival form, +provided the appropriate decompression or de-archiving software is +widely available for no charge. + +1.12. ``You'' means an individual or a legal entity exercising rights +under, and complying with all of the terms of, this License. For legal +entities,``You'' includes any entity which controls, is controlled by, +or is under common control with You. For purposes of this definition, +``control'' means (a) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (b) ownership of fifty percent (50%) or more of the +outstanding shares or beneficial ownership of such entity. + +2. Source Code License. + +2.1. The Initial Developer Grant. 
+The Initial Developer hereby grants You a world-wide, royalty-free, +non-exclusive license, subject to third party intellectual property +claims: + +(a) to use, reproduce, modify, display, perform, sublicense and + distribute the Original Code (or portions thereof) with or without + Modifications, or as part of a Larger Work; and + +(b) under patents now or hereafter owned or controlled by Initial + Developer, to make, have made, use and sell (``Utilize'') the + Original Code (or portions thereof), but solely to the extent that + any such patent is reasonably necessary to enable You to Utilize + the Original Code (or portions thereof) and not to any greater + extent that may be necessary to Utilize further Modifications or + combinations. + +2.2. Contributor Grant. +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license, subject to third party intellectual property +claims: + +(a) to use, reproduce, modify, display, perform, sublicense and + distribute the Modifications created by such Contributor (or + portions thereof) either on an unmodified basis, with other + Modifications, as Covered Code or as part of a Larger Work; and + +(b) under patents now or hereafter owned or controlled by Contributor, + to Utilize the Contributor Version (or portions thereof), but + solely to the extent that any such patent is reasonably necessary + to enable You to Utilize the Contributor Version (or portions + thereof), and not to any greater extent that may be necessary to + Utilize further Modifications or combinations. + +3. Distribution Obligations. + +3.1. Application of License. +The Modifications which You contribute are governed by the terms of +this License, including without limitation Section 2.2. The Source +Code version of Covered Code may be distributed only under the terms +of this License, and You must include a copy of this License with +every copy of the Source Code You distribute. 
You may not offer or +impose any terms on any Source Code version that alters or restricts +the applicable version of this License or the recipients' rights +hereunder. However, You may include an additional document offering +the additional rights described in Section 3.5. + +3.2. Availability of Source Code. +Any Modification which You contribute must be made available in Source +Code form under the terms of this License either on the same media as +an Executable version or via an accepted Electronic Distribution +Mechanism to anyone to whom you made an Executable version available; +and if made available via Electronic Distribution Mechanism, must +remain available for at least twelve (12) months after the date it +initially became available, or at least six (6) months after a +subsequent version of that particular Modification has been made +available to such recipients. You are responsible for ensuring that +the Source Code version remains available even if the Electronic +Distribution Mechanism is maintained by a third party. + +3.3. Description of Modifications. +You must cause all Covered Code to which you contribute to contain a +file documenting the changes You made to create that Covered Code and +the date of any change. You must include a prominent statement that +the Modification is derived, directly or indirectly, from Original +Code provided by the Initial Developer and including the name of the +Initial Developer in (a) the Source Code, and (b) in any notice in an +Executable version or related documentation in which You describe the +origin or ownership of the Covered Code. + +3.4. Intellectual Property Matters + +(a) Third Party Claims. 
+ If You have knowledge that a party claims an intellectual property + right in particular functionality or code (or its utilization + under this License), you must include a text file with the source + code distribution titled ``LEGAL'' which describes the claim and + the party making the claim in sufficient detail that a recipient + will know whom to contact. If you obtain such knowledge after You + make Your Modification available as described in Section 3.2, You + shall promptly modify the LEGAL file in all copies You make + available thereafter and shall take other steps (such as notifying + appropriate mailing lists or newsgroups) reasonably calculated to + inform those who received the Covered Code that new knowledge has + been obtained. + +(b) Contributor APIs. + If Your Modification is an application programming interface and + You own or control patents which are reasonably necessary to + implement that API, you must also include this information in the + LEGAL file. + +3.5. Required Notices. +You must duplicate the notice in Exhibit A in each file of the Source +Code, and this License in any documentation for the Source Code, where +You describe recipients' rights relating to Covered Code. If You +created one or more Modification(s), You may add your name as a +Contributor to the notice described in Exhibit A. If it is not +possible to put such notice in a particular Source Code file due to +its structure, then you must include such notice in a location (such +as a relevant directory file) where a user would be likely to look for +such a notice. You may choose to offer, and to charge a fee for, +warranty, support, indemnity or liability obligations to one or more +recipients of Covered Code. However, You may do so only on Your own +behalf, and not on behalf of the Initial Developer or any +Contributor. 
You must make it absolutely clear than any such warranty, +support, indemnity or liability obligation is offered by You alone, +and You hereby agree to indemnify the Initial Developer and every +Contributor for any liability incurred by the Initial Developer or +such Contributor as a result of warranty, support, indemnity or +liability terms You offer. + +3.6. Distribution of Executable Versions. +You may distribute Covered Code in Executable form only if the +requirements of Section 3.1-3.5 have been met for that Covered Code, +and if You include a notice stating that the Source Code version of +the Covered Code is available under the terms of this License, +including a description of how and where You have fulfilled the +obligations of Section 3.2. The notice must be conspicuously included +in any notice in an Executable version, related documentation or +collateral in which You describe recipients' rights relating to the +Covered Code. You may distribute the Executable version of Covered +Code under a license of Your choice, which may contain terms different +from this License, provided that You are in compliance with the terms +of this License and that the license for the Executable version does +not attempt to limit or alter the recipient's rights in the Source +Code version from the rights set forth in this License. If You +distribute the Executable version under a different license You must +make it absolutely clear that any terms which differ from this License +are offered by You alone, not by the Initial Developer or any +Contributor. You hereby agree to indemnify the Initial Developer and +every Contributor for any liability incurred by the Initial Developer +or such Contributor as a result of any such terms You offer. + +3.7. Larger Works. +You may create a Larger Work by combining Covered Code with other code +not governed by the terms of this License and distribute the Larger +Work as a single product. 
In such a case, You must make sure the +requirements of this License are fulfilled for the Covered Code. + +4. Inability to Comply Due to Statute or Regulation. +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Code due to statute +or regulation then You must: (a) comply with the terms of this License +to the maximum extent possible; and (b) describe the limitations and +the code they affect. Such description must be included in the LEGAL +file described in Section 3.4 and must be included with all +distributions of the Source Code. Except to the extent prohibited by +statute or regulation, such description must be sufficiently detailed +for a recipient of ordinary skill to be able to understand it. + +5. Application of this License. + +This License applies to code to which the Initial Developer has +attached the notice in Exhibit A, and to related Covered Code. + +6. CONNECTION TO MOZILLA PUBLIC LICENSE + +This Erlang License is a derivative work of the Mozilla Public +License, Version 1.0. It contains terms which differ from the Mozilla +Public License, Version 1.0. + +7. DISCLAIMER OF WARRANTY. + +COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN ``AS IS'' BASIS, +WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, +WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF +DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR +NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF +THE COVERED CODE IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE +IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER +CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR +CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART +OF THIS LICENSE. NO USE OF ANY COVERED CODE IS AUTHORIZED HEREUNDER +EXCEPT UNDER THIS DISCLAIMER. + +8. TERMINATION. 
+This License and the rights granted hereunder will terminate +automatically if You fail to comply with terms herein and fail to cure +such breach within 30 days of becoming aware of the breach. All +sublicenses to the Covered Code which are properly granted shall +survive any termination of this License. Provisions which, by their +nature, must remain in effect beyond the termination of this License +shall survive. + +9. DISCLAIMER OF LIABILITY +Any utilization of Covered Code shall not cause the Initial Developer +or any Contributor to be liable for any damages (neither direct nor +indirect). + +10. MISCELLANEOUS +This License represents the complete agreement concerning the subject +matter hereof. If any provision is held to be unenforceable, such +provision shall be reformed only to the extent necessary to make it +enforceable. This License shall be construed by and in accordance with +the substantive laws of Sweden. Any dispute, controversy or claim +arising out of or relating to this License, or the breach, termination +or invalidity thereof, shall be subject to the exclusive jurisdiction +of Swedish courts, with the Stockholm City Court as the first +instance. + +EXHIBIT A. + +``The contents of this file are subject to the Erlang Public License, +Version 1.1, (the "License"); you may not use this file except in +compliance with the License. You should have received a copy of the +Erlang Public License along with this software. If not, it can be +retrieved via the world wide web at http://www.erlang.org/. + +Software distributed under the License is distributed on an "AS IS" +basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +the License for the specific language governing rights and limitations +under the License. + +The Initial Developer of the Original Code is Ericsson AB. +Portions created by Ericsson are Copyright 2013, Ericsson AB. 
+All Rights Reserved.'' diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-MIT-Mochiweb b/rabbitmq-server/LICENSE-MIT-Mochiweb similarity index 100% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-MIT-Mochiweb rename to rabbitmq-server/LICENSE-MIT-Mochiweb diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-MIT-SockJS b/rabbitmq-server/LICENSE-MIT-SockJS similarity index 100% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/LICENSE-MIT-SockJS rename to rabbitmq-server/LICENSE-MIT-SockJS diff --git a/rabbitmq-server/LICENSE-MPL2 b/rabbitmq-server/LICENSE-MPL2 new file mode 100644 index 0000000..14e2f77 --- /dev/null +++ b/rabbitmq-server/LICENSE-MPL2 @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. 
"Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. 
Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/rabbitmq-server/Makefile b/rabbitmq-server/Makefile index c2cae4a..833334d 100644 --- a/rabbitmq-server/Makefile +++ b/rabbitmq-server/Makefile @@ -1,437 +1,455 @@ -TMPDIR ?= /tmp - -RABBITMQ_NODENAME ?= rabbit -RABBITMQ_SERVER_START_ARGS ?= -RABBITMQ_MNESIA_DIR ?= $(TMPDIR)/rabbitmq-$(RABBITMQ_NODENAME)-mnesia -RABBITMQ_PLUGINS_EXPAND_DIR ?= $(TMPDIR)/rabbitmq-$(RABBITMQ_NODENAME)-plugins-scratch -RABBITMQ_LOG_BASE ?= $(TMPDIR) - -DEPS_FILE=deps.mk -SOURCE_DIR=src -TEST_DIR=test/src -EBIN_DIR=ebin -TEST_EBIN_DIR=test/ebin -INCLUDE_DIR=include -DOCS_DIR=docs -INCLUDES=$(wildcard $(INCLUDE_DIR)/*.hrl) $(INCLUDE_DIR)/rabbit_framing.hrl -SOURCES=$(wildcard $(SOURCE_DIR)/*.erl) $(SOURCE_DIR)/rabbit_framing_amqp_0_9_1.erl $(SOURCE_DIR)/rabbit_framing_amqp_0_8.erl $(USAGES_ERL) -TEST_SOURCES=$(wildcard $(TEST_DIR)/*.erl) -BEAM_TARGETS=$(patsubst $(SOURCE_DIR)/%.erl, $(EBIN_DIR)/%.beam, $(SOURCES)) -TEST_BEAM_TARGETS=$(patsubst $(TEST_DIR)/%.erl, $(TEST_EBIN_DIR)/%.beam, $(TEST_SOURCES)) -TARGETS=$(EBIN_DIR)/rabbit.app $(INCLUDE_DIR)/rabbit_framing.hrl $(BEAM_TARGETS) plugins -TEST_TARGETS=$(TEST_BEAM_TARGETS) -WEB_URL=http://www.rabbitmq.com/ -MANPAGES=$(patsubst %.xml, %.gz, $(wildcard $(DOCS_DIR)/*.[0-9].xml)) -WEB_MANPAGES=$(patsubst %.xml, %.man.xml, $(wildcard $(DOCS_DIR)/*.[0-9].xml) $(DOCS_DIR)/rabbitmq-service.xml $(DOCS_DIR)/rabbitmq-echopid.xml) -USAGES_XML=$(DOCS_DIR)/rabbitmqctl.1.xml $(DOCS_DIR)/rabbitmq-plugins.1.xml -USAGES_ERL=$(foreach XML, $(USAGES_XML), $(call usage_xml_to_erl, $(XML))) - -ifeq ($(shell python -c 'import simplejson' 2>/dev/null && echo yes),yes) -PYTHON=python -else -ifeq ($(shell python2.7 -c 'import json' 2>/dev/null 
&& echo yes),yes) -PYTHON=python2.7 -else -ifeq ($(shell python2.6 -c 'import simplejson' 2>/dev/null && echo yes),yes) -PYTHON=python2.6 -else -ifeq ($(shell python2.5 -c 'import simplejson' 2>/dev/null && echo yes),yes) -PYTHON=python2.5 -else -# Hmm. Missing simplejson? -PYTHON=python -endif -endif -endif -endif +PROJECT = rabbit +VERSION ?= $(call get_app_version,src/$(PROJECT).app.src) -BASIC_PLT=basic.plt -RABBIT_PLT=rabbit.plt +# Release artifacts are put in $(PACKAGES_DIR). +PACKAGES_DIR ?= $(abspath PACKAGES) -ifndef USE_PROPER_QC -# PropEr needs to be installed for property checking -# http://proper.softlab.ntua.gr/ -USE_PROPER_QC=$(shell erl -noshell -eval 'io:format({module, proper} =:= code:ensure_loaded(proper)), halt().') -endif +DEPS = ranch $(PLUGINS) -#other args: +native +"{hipe,[o3,verbose]}" -Ddebug=true +debug_info +no_strict_record_tests -ERLC_OPTS=-I $(INCLUDE_DIR) -Wall +warn_export_vars -v +debug_info $(call boolean_macro,$(USE_SPECS),use_specs) $(call boolean_macro,$(USE_PROPER_QC),use_proper_qc) - -# Our type specs rely on dict:dict/0 etc, which are only available in -# 17.0 upwards. 
-define compare_version -$(shell awk 'BEGIN { - split("$(1)", v1, "\."); - version1 = v1[1] * 1000000 + v1[2] * 10000 + v1[3] * 100 + v1[4]; - - split("$(2)", v2, "\."); - version2 = v2[1] * 1000000 + v2[2] * 10000 + v2[3] * 100 + v2[4]; - - if (version1 $(3) version2) { - print "true"; - } else { - print "false"; - } -}') +define usage_xml_to_erl +$(subst __,_,$(patsubst $(DOCS_DIR)/rabbitmq%.1.xml, src/rabbit_%_usage.erl, $(subst -,_,$(1)))) endef -ERTS_VER = $(shell erl -version 2>&1 | sed -E 's/.* version //') -USE_SPECS_MIN_ERTS_VER = 5.11 -ifeq ($(call compare_version,$(ERTS_VER),$(USE_SPECS_MIN_ERTS_VER),>=),true) -ERLC_OPTS += -Duse_specs +DOCS_DIR = docs +MANPAGES = $(patsubst %.xml, %, $(wildcard $(DOCS_DIR)/*.[0-9].xml)) +WEB_MANPAGES = $(patsubst %.xml, %.man.xml, $(wildcard $(DOCS_DIR)/*.[0-9].xml) $(DOCS_DIR)/rabbitmq-service.xml $(DOCS_DIR)/rabbitmq-echopid.xml) +USAGES_XML = $(DOCS_DIR)/rabbitmqctl.1.xml $(DOCS_DIR)/rabbitmq-plugins.1.xml +USAGES_ERL = $(foreach XML, $(USAGES_XML), $(call usage_xml_to_erl, $(XML))) + +EXTRA_SOURCES += $(USAGES_ERL) + +.DEFAULT_GOAL = all +$(PROJECT).d:: $(EXTRA_SOURCES) + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-run.mk \ + rabbit_common/mk/rabbitmq-dist.mk \ + rabbit_common/mk/rabbitmq-tools.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk + +# List of plugins to include in a RabbitMQ release. 
+DISTRIBUTED_DEPS := rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_visualiser \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples + +ifneq ($(IS_DEP),1) +ifneq ($(filter source-dist packages package-%,$(MAKECMDGOALS)),) +DEPS += $(DISTRIBUTED_DEPS) +endif +ifneq ($(wildcard git-revisions.txt),) +DEPS += $(DISTRIBUTED_DEPS) endif - -ifdef INSTRUMENT_FOR_QC -ERLC_OPTS += -DINSTR_MOD=gm_qc -else -ERLC_OPTS += -DINSTR_MOD=gm endif -include version.mk - -PLUGINS_SRC_DIR?=$(shell [ -d "plugins-src" ] && echo "plugins-src" || echo ) -PLUGINS_DIR=plugins -TARBALL_NAME=rabbitmq-server-$(VERSION) -TARGET_SRC_DIR=dist/$(TARBALL_NAME) - -SIBLING_CODEGEN_DIR=../rabbitmq-codegen/ -AMQP_CODEGEN_DIR=$(shell [ -d $(SIBLING_CODEGEN_DIR) ] && echo $(SIBLING_CODEGEN_DIR) || echo codegen) -AMQP_SPEC_JSON_FILES_0_9_1=$(AMQP_CODEGEN_DIR)/amqp-rabbitmq-0.9.1.json $(AMQP_CODEGEN_DIR)/credit_extension.json -AMQP_SPEC_JSON_FILES_0_8=$(AMQP_CODEGEN_DIR)/amqp-rabbitmq-0.8.json - -ERL_CALL=erl_call -sname $(RABBITMQ_NODENAME) -e - -ERL_EBIN=erl -noinput -pa $(EBIN_DIR) +include erlang.mk -define usage_xml_to_erl - $(subst __,_,$(patsubst $(DOCS_DIR)/rabbitmq%.1.xml, $(SOURCE_DIR)/rabbit_%_usage.erl, $(subst -,_,$(1)))) -endef +# -------------------------------------------------------------------- +# Compilation. 
+# -------------------------------------------------------------------- -define usage_dep - $(call usage_xml_to_erl, $(1)): $(1) $(DOCS_DIR)/usage.xsl -endef - -define boolean_macro -$(if $(filter true,$(1)),-D$(2)) -endef +RMQ_ERLC_OPTS += -I $(DEPS_DIR)/rabbit_common/include -ifneq "$(SBIN_DIR)" "" -ifneq "$(TARGET_DIR)" "" -SCRIPTS_REL_PATH=$(shell ./calculate-relative $(TARGET_DIR)/sbin $(SBIN_DIR)) +ifdef INSTRUMENT_FOR_QC +RMQ_ERLC_OPTS += -DINSTR_MOD=gm_qc +else +RMQ_ERLC_OPTS += -DINSTR_MOD=gm endif + +ifdef CREDIT_FLOW_TRACING +RMQ_ERLC_OPTS += -DCREDIT_FLOW_TRACING=true endif -# Versions prior to this are not supported -NEED_MAKE := 3.80 -ifneq "$(NEED_MAKE)" "$(firstword $(sort $(NEED_MAKE) $(MAKE_VERSION)))" -$(error Versions of make prior to $(NEED_MAKE) are not supported) +ERTS_VER := $(shell erl -version 2>&1 | sed -E 's/.* version //') +USE_SPECS_MIN_ERTS_VER = 5.11 +ifeq ($(call compare_version,$(ERTS_VER),$(USE_SPECS_MIN_ERTS_VER),>=),true) +RMQ_ERLC_OPTS += -Duse_specs endif -# .DEFAULT_GOAL introduced in 3.81 -DEFAULT_GOAL_MAKE := 3.81 -ifneq "$(DEFAULT_GOAL_MAKE)" "$(firstword $(sort $(DEFAULT_GOAL_MAKE) $(MAKE_VERSION)))" -.DEFAULT_GOAL=all +ifndef USE_PROPER_QC +# PropEr needs to be installed for property checking +# http://proper.softlab.ntua.gr/ +USE_PROPER_QC := $(shell $(ERL) -eval 'io:format({module, proper} =:= code:ensure_loaded(proper)), halt().') +RMQ_ERLC_OPTS += $(if $(filter true,$(USE_PROPER_QC)),-Duse_proper_qc) endif -all: $(TARGETS) $(TEST_TARGETS) +ERLC_OPTS += $(RMQ_ERLC_OPTS) -.PHONY: plugins check-xref -ifneq "$(PLUGINS_SRC_DIR)" "" -plugins: - [ -d "$(PLUGINS_SRC_DIR)/rabbitmq-server" ] || ln -s "$(CURDIR)" "$(PLUGINS_SRC_DIR)/rabbitmq-server" - mkdir -p $(PLUGINS_DIR) - PLUGINS_SRC_DIR="" $(MAKE) -C "$(PLUGINS_SRC_DIR)" plugins-dist PLUGINS_DIST_DIR="$(CURDIR)/$(PLUGINS_DIR)" VERSION=$(VERSION) - echo "Put your EZs here and use rabbitmq-plugins to enable them." 
> $(PLUGINS_DIR)/README - rm -f $(PLUGINS_DIR)/rabbit_common*.ez +clean:: clean-extra-sources -# add -q to remove printout of warnings.... -check-xref: $(BEAM_TARGETS) $(PLUGINS_DIR) - rm -rf lib - ./check_xref $(PLUGINS_DIR) -q +clean-extra-sources: + $(gen_verbose) rm -f $(EXTRA_SOURCES) -else -plugins: -# Not building plugins +# -------------------------------------------------------------------- +# Tests. +# -------------------------------------------------------------------- -check-xref: - $(info xref checks are disabled as there is no plugins-src directory) +TARGETS_IN_RABBITMQ_TEST = $(patsubst %,%-in-rabbitmq_test,\ + tests full unit lite conformance16 lazy-vq-tests) -endif +.PHONY: $(TARGETS_IN_RABBITMQ_TEST) -$(DEPS_FILE): $(SOURCES) $(INCLUDES) - rm -f $@ - echo $(subst : ,:,$(foreach FILE,$^,$(FILE):)) | escript generate_deps $@ $(EBIN_DIR) - -$(EBIN_DIR)/rabbit.app: $(EBIN_DIR)/rabbit_app.in $(SOURCES) generate_app - escript generate_app $< $@ $(SOURCE_DIR) - -$(EBIN_DIR)/%.beam: $(SOURCE_DIR)/%.erl | $(DEPS_FILE) - erlc -o $(EBIN_DIR) $(ERLC_OPTS) -pa $(EBIN_DIR) $< - -$(TEST_EBIN_DIR)/%.beam: $(TEST_DIR)/%.erl | $(TEST_EBIN_DIR) - erlc -o $(TEST_EBIN_DIR) $(ERLC_OPTS) -pa $(EBIN_DIR) -pa $(TEST_EBIN_DIR) $< - -$(TEST_EBIN_DIR): - mkdir -p $(TEST_EBIN_DIR) - -$(INCLUDE_DIR)/rabbit_framing.hrl: codegen.py $(AMQP_CODEGEN_DIR)/amqp_codegen.py $(AMQP_SPEC_JSON_FILES_0_9_1) $(AMQP_SPEC_JSON_FILES_0_8) - $(PYTHON) codegen.py --ignore-conflicts header $(AMQP_SPEC_JSON_FILES_0_9_1) $(AMQP_SPEC_JSON_FILES_0_8) $@ - -$(SOURCE_DIR)/rabbit_framing_amqp_0_9_1.erl: codegen.py $(AMQP_CODEGEN_DIR)/amqp_codegen.py $(AMQP_SPEC_JSON_FILES_0_9_1) - $(PYTHON) codegen.py body $(AMQP_SPEC_JSON_FILES_0_9_1) $@ - -$(SOURCE_DIR)/rabbit_framing_amqp_0_8.erl: codegen.py $(AMQP_CODEGEN_DIR)/amqp_codegen.py $(AMQP_SPEC_JSON_FILES_0_8) - $(PYTHON) codegen.py body $(AMQP_SPEC_JSON_FILES_0_8) $@ - -dialyze: $(BEAM_TARGETS) $(BASIC_PLT) - dialyzer --plt $(BASIC_PLT) --no_native 
--fullpath \ - $(BEAM_TARGETS) - -# rabbit.plt is used by rabbitmq-erlang-client's dialyze make target -create-plt: $(RABBIT_PLT) - -$(RABBIT_PLT): $(BEAM_TARGETS) $(BASIC_PLT) - dialyzer --plt $(BASIC_PLT) --output_plt $@ --no_native \ - --add_to_plt $(BEAM_TARGETS) - -$(BASIC_PLT): $(BEAM_TARGETS) - if [ -f $@ ]; then \ - touch $@; \ - else \ - dialyzer --output_plt $@ --build_plt \ - --apps erts kernel stdlib compiler sasl os_mon mnesia tools \ - public_key crypto ssl xmerl; \ - fi - -clean: - rm -f $(EBIN_DIR)/*.beam - rm -f $(EBIN_DIR)/rabbit.app $(EBIN_DIR)/rabbit.boot $(EBIN_DIR)/rabbit.script $(EBIN_DIR)/rabbit.rel - rm -rf $(TEST_EBIN_DIR) - rm -f $(PLUGINS_DIR)/*.ez - [ -d "$(PLUGINS_SRC_DIR)" ] && PLUGINS_SRC_DIR="" PRESERVE_CLONE_DIR=1 make -C $(PLUGINS_SRC_DIR) clean || true - rm -f $(INCLUDE_DIR)/rabbit_framing.hrl $(SOURCE_DIR)/rabbit_framing_amqp_*.erl codegen.pyc - rm -f $(DOCS_DIR)/*.[0-9].gz $(DOCS_DIR)/*.man.xml $(DOCS_DIR)/*.erl $(USAGES_ERL) - rm -f $(RABBIT_PLT) - rm -f $(DEPS_FILE) - -cleandb: - rm -rf $(RABBITMQ_MNESIA_DIR)/* - -############ various tasks to interact with RabbitMQ ################### - -BASIC_SCRIPT_ENVIRONMENT_SETTINGS=\ - RABBITMQ_NODE_IP_ADDRESS="$(RABBITMQ_NODE_IP_ADDRESS)" \ - RABBITMQ_NODE_PORT="$(RABBITMQ_NODE_PORT)" \ - RABBITMQ_LOG_BASE="$(RABBITMQ_LOG_BASE)" \ - RABBITMQ_MNESIA_DIR="$(RABBITMQ_MNESIA_DIR)" \ - RABBITMQ_PLUGINS_EXPAND_DIR="$(RABBITMQ_PLUGINS_EXPAND_DIR)" - -run: all - $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \ - RABBITMQ_ALLOW_INPUT=true \ - RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \ - ./scripts/rabbitmq-server - -run-background: all - $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \ - RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \ - ./scripts/rabbitmq-server -detached - -run-node: all - $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \ - RABBITMQ_NODE_ONLY=true \ - RABBITMQ_ALLOW_INPUT=true \ - RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \ - ./scripts/rabbitmq-server - 
-run-background-node: all - $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \ - RABBITMQ_NODE_ONLY=true \ - RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \ - ./scripts/rabbitmq-server -detached - -run-tests: all - echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) - echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) -n hare || true - OUT=$$(echo "rabbit_tests:all_tests()." | $(ERL_CALL)) ; \ - echo $$OUT ; echo $$OUT | grep '^{ok, passed}$$' > /dev/null - -run-qc: all - echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) - ./quickcheck $(RABBITMQ_NODENAME) rabbit_backing_queue_qc 100 40 - ./quickcheck $(RABBITMQ_NODENAME) gm_qc 1000 200 - -start-background-node: all - -rm -f $(RABBITMQ_MNESIA_DIR).pid - mkdir -p $(RABBITMQ_MNESIA_DIR) - $(BASIC_SCRIPT_ENVIRONMENT_SETTINGS) \ - RABBITMQ_NODE_ONLY=true \ - RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" \ - ./scripts/rabbitmq-server \ - > $(RABBITMQ_MNESIA_DIR)/startup_log \ - 2> $(RABBITMQ_MNESIA_DIR)/startup_err & - ./scripts/rabbitmqctl -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_MNESIA_DIR).pid kernel - -start-rabbit-on-node: all - echo "rabbit:start()." | $(ERL_CALL) - ./scripts/rabbitmqctl -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_MNESIA_DIR).pid - -stop-rabbit-on-node: all - echo "rabbit:stop()." | $(ERL_CALL) - -set-resource-alarm: all - echo "rabbit_alarm:set_alarm({{resource_limit, $(SOURCE), node()}, []})." | \ - $(ERL_CALL) - -clear-resource-alarm: all - echo "rabbit_alarm:clear_alarm({resource_limit, $(SOURCE), node()})." | \ - $(ERL_CALL) - -stop-node: - -( \ - pid=$$(./scripts/rabbitmqctl -n $(RABBITMQ_NODENAME) eval 'os:getpid().') && \ - $(ERL_CALL) -q && \ - while ps -p $$pid >/dev/null 2>&1; do sleep 1; done \ - ) - -# code coverage will be created for subdirectory "ebin" of COVER_DIR -COVER_DIR=. - -start-cover: all - echo "rabbit_misc:start_cover([\"rabbit\", \"hare\"])." | $(ERL_CALL) - echo "rabbit_misc:enable_cover([\"$(COVER_DIR)\"])." 
| $(ERL_CALL) - -stop-cover: all - echo "rabbit_misc:report_cover(), cover:stop()." | $(ERL_CALL) - cat cover/summary.txt - -######################################################################## - -srcdist: distclean - mkdir -p $(TARGET_SRC_DIR)/codegen - cp -r ebin src include LICENSE LICENSE-MPL-RabbitMQ INSTALL README $(TARGET_SRC_DIR) - sed 's/%%VSN%%/$(VERSION)/' $(TARGET_SRC_DIR)/ebin/rabbit_app.in > $(TARGET_SRC_DIR)/ebin/rabbit_app.in.tmp && \ - mv $(TARGET_SRC_DIR)/ebin/rabbit_app.in.tmp $(TARGET_SRC_DIR)/ebin/rabbit_app.in - - cp -r $(AMQP_CODEGEN_DIR)/* $(TARGET_SRC_DIR)/codegen/ - cp codegen.py Makefile generate_app generate_deps calculate-relative $(TARGET_SRC_DIR) - - echo "VERSION?=${VERSION}" > $(TARGET_SRC_DIR)/version.mk - - cp -r scripts $(TARGET_SRC_DIR) - cp -r $(DOCS_DIR) $(TARGET_SRC_DIR) - chmod 0755 $(TARGET_SRC_DIR)/scripts/* - -ifneq "$(PLUGINS_SRC_DIR)" "" - cp -r $(PLUGINS_SRC_DIR) $(TARGET_SRC_DIR)/plugins-src - rm $(TARGET_SRC_DIR)/LICENSE - cat packaging/common/LICENSE.head >> $(TARGET_SRC_DIR)/LICENSE - cat $(AMQP_CODEGEN_DIR)/license_info >> $(TARGET_SRC_DIR)/LICENSE - find $(PLUGINS_SRC_DIR)/licensing -name "license_info_*" -exec cat '{}' >> $(TARGET_SRC_DIR)/LICENSE \; - cat packaging/common/LICENSE.tail >> $(TARGET_SRC_DIR)/LICENSE - find $(PLUGINS_SRC_DIR)/licensing -name "LICENSE-*" -exec cp '{}' $(TARGET_SRC_DIR) \; - rm -rf $(TARGET_SRC_DIR)/licensing -else - @echo No plugins source distribution found -endif +TEST_ERLC_OPTS += $(RMQ_ERLC_OPTS) - (cd dist; tar -zchf $(TARBALL_NAME).tar.gz $(TARBALL_NAME)) - (cd dist; zip -q -r $(TARBALL_NAME).zip $(TARBALL_NAME)) - rm -rf $(TARGET_SRC_DIR) +tests:: tests-in-rabbitmq_test -distclean: clean - $(MAKE) -C $(AMQP_CODEGEN_DIR) distclean - rm -rf dist - find . 
-regex '.*\(~\|#\|\.swp\|\.dump\)' -exec rm {} \; +$(TARGETS_IN_RABBITMQ_TEST): $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ + test-build $(DEPS_DIR)/rabbitmq_test + $(MAKE) -C $(DEPS_DIR)/rabbitmq_test \ + IS_DEP=1 \ + RABBITMQ_BROKER_DIR=$(RABBITMQ_BROKER_DIR) \ + $(patsubst %-in-rabbitmq_test,%,$@) + +# -------------------------------------------------------------------- +# Documentation. +# -------------------------------------------------------------------- # xmlto can not read from standard input, so we mess with a tmp file. -%.gz: %.xml $(DOCS_DIR)/examples-to-end.xsl - xmlto --version | grep -E '^xmlto version 0\.0\.([0-9]|1[1-8])$$' >/dev/null || opt='--stringparam man.indent.verbatims=0' ; \ - xsltproc --novalid $(DOCS_DIR)/examples-to-end.xsl $< > $<.tmp && \ - xmlto -o $(DOCS_DIR) $$opt man $<.tmp && \ - gzip -f $(DOCS_DIR)/`basename $< .xml` - rm -f $<.tmp +%: %.xml $(DOCS_DIR)/examples-to-end.xsl + $(gen_verbose) xmlto --version | \ + grep -E '^xmlto version 0\.0\.([0-9]|1[1-8])$$' >/dev/null || \ + opt='--stringparam man.indent.verbatims=0' ; \ + xsltproc --novalid $(DOCS_DIR)/examples-to-end.xsl $< > $<.tmp && \ + xmlto -vv -o $(DOCS_DIR) $$opt man $< 2>&1 | (grep -v '^Note: Writing' || :) && \ + test -f $@ && \ + rm $<.tmp # Use tmp files rather than a pipeline so that we get meaningful errors # Do not fold the cp into previous line, it's there to stop the file being # generated but empty if we fail -$(SOURCE_DIR)/%_usage.erl: - xsltproc --novalid --stringparam modulename "`basename $@ .erl`" \ - $(DOCS_DIR)/usage.xsl $< > $@.tmp - sed -e 's/"/\\"/g' -e 's/%QUOTE%/"/g' $@.tmp > $@.tmp2 - fold -s $@.tmp2 > $@.tmp3 - mv $@.tmp3 $@ - rm $@.tmp $@.tmp2 +define usage_dep +$(call usage_xml_to_erl, $(1)):: $(1) $(DOCS_DIR)/usage.xsl + $$(gen_verbose) xsltproc --novalid --stringparam modulename "`basename $$@ .erl`" \ + $(DOCS_DIR)/usage.xsl $$< > $$@.tmp && \ + sed -e 's/"/\\"/g' -e 's/%QUOTE%/"/g' $$@.tmp > $$@.tmp2 && \ + fold -s $$@.tmp2 > $$@.tmp3 && \ + 
mv $$@.tmp3 $$@ && \ + rm $$@.tmp $$@.tmp2 +endef + +$(foreach XML,$(USAGES_XML),$(eval $(call usage_dep, $(XML)))) # We rename the file before xmlto sees it since xmlto will use the name of # the file to make internal links. %.man.xml: %.xml $(DOCS_DIR)/html-to-website-xml.xsl - cp $< `basename $< .xml`.xml && \ - xmlto xhtml-nochunks `basename $< .xml`.xml ; rm `basename $< .xml`.xml + $(gen_verbose) cp $< `basename $< .xml`.xml && \ + xmlto xhtml-nochunks `basename $< .xml`.xml ; \ + rm `basename $< .xml`.xml && \ cat `basename $< .xml`.html | \ xsltproc --novalid $(DOCS_DIR)/remove-namespaces.xsl - | \ - xsltproc --novalid --stringparam original `basename $<` $(DOCS_DIR)/html-to-website-xml.xsl - | \ - xmllint --format - > $@ + xsltproc --novalid --stringparam original `basename $<` $(DOCS_DIR)/html-to-website-xml.xsl - | \ + xmllint --format - > $@ && \ rm `basename $< .xml`.html -docs_all: $(MANPAGES) $(WEB_MANPAGES) - -install: install_bin install_docs - -install_bin: all install_dirs - cp -r ebin include LICENSE* INSTALL $(TARGET_DIR) +.PHONY: manpages web-manpages distclean-manpages + +docs:: manpages web-manpages + +manpages: $(MANPAGES) + @: + +web-manpages: $(WEB_MANPAGES) + @: + +distclean:: distclean-manpages + +distclean-manpages:: + $(gen_verbose) rm -f $(MANPAGES) $(WEB_MANPAGES) + +# -------------------------------------------------------------------- +# Distribution. +# -------------------------------------------------------------------- + +.PHONY: source-dist clean-source-dist + +SOURCE_DIST_BASE ?= rabbitmq-server +SOURCE_DIST_SUFFIXES ?= tar.xz zip +SOURCE_DIST ?= $(PACKAGES_DIR)/$(SOURCE_DIST_BASE)-$(VERSION) + +# The first source distribution file is used by packages: if the archive +# type changes, you must update all packages' Makefile. 
+SOURCE_DIST_FILES = $(addprefix $(SOURCE_DIST).,$(SOURCE_DIST_SUFFIXES)) + +.PHONY: $(SOURCE_DIST_FILES) + +source-dist: $(SOURCE_DIST_FILES) + @: + +RSYNC ?= rsync +RSYNC_V_0 = +RSYNC_V_1 = -v +RSYNC_V_2 = -v +RSYNC_V = $(RSYNC_V_$(V)) +RSYNC_FLAGS += -a $(RSYNC_V) \ + --exclude '.sw?' --exclude '.*.sw?' \ + --exclude '*.beam' \ + --exclude '*.pyc' \ + --exclude '.git*' \ + --exclude '.hg*' \ + --exclude '.travis.yml' \ + --exclude '.*.plt' \ + --exclude '$(notdir $(ERLANG_MK_TMP))' \ + --exclude 'ebin' \ + --exclude 'packaging' \ + --exclude 'erl_crash.dump' \ + --exclude 'MnesiaCore.*' \ + --exclude 'cover/' \ + --exclude 'deps/' \ + --exclude '$(notdir $(DEPS_DIR))/' \ + --exclude 'plugins/' \ + --exclude '$(notdir $(DIST_DIR))/' \ + --exclude '/$(notdir $(PACKAGES_DIR))/' \ + --exclude '/cowboy/doc/' \ + --exclude '/cowboy/examples/' \ + --exclude '/rabbitmq_amqp1_0/test/swiftmq/build/'\ + --exclude '/rabbitmq_amqp1_0/test/swiftmq/swiftmq*'\ + --exclude '/rabbitmq_mqtt/test/build/' \ + --exclude '/rabbitmq_mqtt/test/test_client/'\ + --delete \ + --delete-excluded + +TAR ?= tar +TAR_V_0 = +TAR_V_1 = -v +TAR_V_2 = -v +TAR_V = $(TAR_V_$(V)) + +GZIP ?= gzip +BZIP2 ?= bzip2 +XZ ?= xz + +ZIP ?= zip +ZIP_V_0 = -q +ZIP_V_1 = +ZIP_V_2 = +ZIP_V = $(ZIP_V_$(V)) + +.PHONY: $(SOURCE_DIST) + +$(SOURCE_DIST): $(ERLANG_MK_RECURSIVE_DEPS_LIST) + $(verbose) mkdir -p $(dir $@) + $(gen_verbose) $(RSYNC) $(RSYNC_FLAGS) ./ $@/ + $(verbose) sed -E -i.bak \ + -e 's/[{]vsn[[:blank:]]*,[^}]+}/{vsn, "$(VERSION)"}/' \ + $@/src/$(PROJECT).app.src && \ + rm $@/src/$(PROJECT).app.src.bak + $(verbose) cat packaging/common/LICENSE.head > $@/LICENSE + $(verbose) mkdir -p $@/deps/licensing + $(verbose) for dep in $$(cat $(ERLANG_MK_RECURSIVE_DEPS_LIST) | grep -v '/$(PROJECT)$$' | LC_COLLATE=C sort); do \ + $(RSYNC) $(RSYNC_FLAGS) \ + $$dep \ + $@/deps; \ + if test -f $@/deps/$$(basename $$dep)/erlang.mk && \ + test "$$(wc -l $@/deps/$$(basename $$dep)/erlang.mk | awk '{print $$1;}')" = "1" && 
\ + grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" $@/deps/$$(basename $$dep)/erlang.mk; then \ + echo "include ../../erlang.mk" > $@/deps/$$(basename $$dep)/erlang.mk; \ + fi; \ + sed -E -i.bak "s|^[[:blank:]]*include[[:blank:]]+\.\./.*erlang.mk$$|include ../../erlang.mk|" \ + $@/deps/$$(basename $$dep)/Makefile && \ + rm $@/deps/$$(basename $$dep)/Makefile.bak; \ + if test -f "$$dep/license_info"; then \ + cp "$$dep/license_info" "$@/deps/licensing/license_info_$$(basename "$$dep")"; \ + cat "$$dep/license_info" >> $@/LICENSE; \ + fi; \ + find "$$dep" -maxdepth 1 -name 'LICENSE-*' -exec cp '{}' $@/deps/licensing \; ; \ + done + $(verbose) cat packaging/common/LICENSE.tail >> $@/LICENSE + $(verbose) find $@/deps/licensing -name 'LICENSE-*' -exec cp '{}' $@ \; + $(verbose) for file in $$(find $@ -name '*.app.src'); do \ + sed -E -i.bak -e 's/[{]vsn[[:blank:]]*,[[:blank:]]*""[[:blank:]]*}/{vsn, "$(VERSION)"}/' $$file; \ + rm $$file.bak; \ + done + $(verbose) echo "$(PROJECT) $$(git rev-parse HEAD) $$(git describe --tags --exact-match 2>/dev/null || git symbolic-ref -q --short HEAD)" > $@/git-revisions.txt + $(verbose) for dep in $$(cat $(ERLANG_MK_RECURSIVE_DEPS_LIST)); do \ + (cd $$dep; echo "$$(basename "$$dep") $$(git rev-parse HEAD) $$(git describe --tags --exact-match 2>/dev/null || git symbolic-ref -q --short HEAD)") >> $@/git-revisions.txt; \ + done - chmod 0755 scripts/* - for script in rabbitmq-env rabbitmq-server rabbitmqctl rabbitmq-plugins rabbitmq-defaults; do \ - cp scripts/$$script $(TARGET_DIR)/sbin; \ - [ -e $(SBIN_DIR)/$$script ] || ln -s $(SCRIPTS_REL_PATH)/$$script $(SBIN_DIR)/$$script; \ +# TODO: Fix file timestamps to have reproducible source archives. 
+# $(verbose) find $@ -not -name 'git-revisions.txt' -print0 | xargs -0 touch -r $@/git-revisions.txt + +$(SOURCE_DIST).tar.gz: $(SOURCE_DIST) + $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \ + find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \ + xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \ + $(GZIP) --best > $@ + +$(SOURCE_DIST).tar.bz2: $(SOURCE_DIST) + $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \ + find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \ + xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \ + $(BZIP2) > $@ + +$(SOURCE_DIST).tar.xz: $(SOURCE_DIST) + $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \ + find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \ + xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \ + $(XZ) > $@ + +$(SOURCE_DIST).zip: $(SOURCE_DIST) + $(verbose) rm -f $@ + $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \ + find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \ + xargs -0 $(ZIP) $(ZIP_V) $@ + +clean:: clean-source-dist + +clean-source-dist: + $(gen_verbose) rm -rf -- $(SOURCE_DIST_BASE)-* + +# -------------------------------------------------------------------- +# Installation. 
+# -------------------------------------------------------------------- + +.PHONY: install install-erlapp install-scripts install-bin install-man +.PHONY: install-windows install-windows-erlapp install-windows-scripts install-windows-docs + +DESTDIR ?= + +PREFIX ?= /usr/local +WINDOWS_PREFIX ?= rabbitmq-server-windows-$(VERSION) + +MANDIR ?= $(PREFIX)/share/man +RMQ_ROOTDIR ?= $(PREFIX)/lib/erlang +RMQ_BINDIR ?= $(RMQ_ROOTDIR)/bin +RMQ_LIBDIR ?= $(RMQ_ROOTDIR)/lib +RMQ_ERLAPP_DIR ?= $(RMQ_LIBDIR)/rabbitmq_server-$(VERSION) + +SCRIPTS = rabbitmq-defaults \ + rabbitmq-env \ + rabbitmq-server \ + rabbitmqctl \ + rabbitmq-plugins + +WINDOWS_SCRIPTS = rabbitmq-defaults.bat \ + rabbitmq-echopid.bat \ + rabbitmq-env.bat \ + rabbitmq-plugins.bat \ + rabbitmq-server.bat \ + rabbitmq-service.bat \ + rabbitmqctl.bat + +UNIX_TO_DOS ?= todos + +inst_verbose_0 = @echo " INST " $@; +inst_verbose = $(inst_verbose_$(V)) + +install: install-erlapp install-scripts + +install-erlapp: dist + $(verbose) mkdir -p $(DESTDIR)$(RMQ_ERLAPP_DIR) + $(inst_verbose) cp -r include ebin plugins LICENSE* INSTALL \ + $(DESTDIR)$(RMQ_ERLAPP_DIR) + $(verbose) echo "Put your EZs here and use rabbitmq-plugins to enable them." \ + > $(DESTDIR)$(RMQ_ERLAPP_DIR)/plugins/README + + @# rabbitmq-common provides headers too: copy them to + @# rabbitmq_server/include. + $(verbose) cp -r $(DEPS_DIR)/rabbit_common/include $(DESTDIR)$(RMQ_ERLAPP_DIR) + +install-scripts: + $(verbose) mkdir -p $(DESTDIR)$(RMQ_ERLAPP_DIR)/sbin + $(inst_verbose) for script in $(SCRIPTS); do \ + cp "scripts/$$script" "$(DESTDIR)$(RMQ_ERLAPP_DIR)/sbin"; \ + chmod 0755 "$(DESTDIR)$(RMQ_ERLAPP_DIR)/sbin/$$script"; \ done - mkdir -p $(TARGET_DIR)/$(PLUGINS_DIR) - [ -d "$(PLUGINS_DIR)" ] && cp $(PLUGINS_DIR)/*.ez $(PLUGINS_DIR)/README $(TARGET_DIR)/$(PLUGINS_DIR) || true +# FIXME: We do symlinks to scripts in $(RMQ_ERLAPP_DIR))/sbin but this +# code assumes a certain hierarchy to make relative symlinks. 
+install-bin: install-scripts + $(verbose) mkdir -p $(DESTDIR)$(RMQ_BINDIR) + $(inst_verbose) for script in $(SCRIPTS); do \ + test -e $(DESTDIR)$(RMQ_BINDIR)/$$script || \ + ln -sf ../lib/$(notdir $(RMQ_ERLAPP_DIR))/sbin/$$script \ + $(DESTDIR)$(RMQ_BINDIR)/$$script; \ + done -install_docs: docs_all install_dirs - for section in 1 5; do \ - mkdir -p $(MAN_DIR)/man$$section; \ - for manpage in $(DOCS_DIR)/*.$$section.gz; do \ - cp $$manpage $(MAN_DIR)/man$$section; \ +install-man: manpages + $(inst_verbose) sections=$$(ls -1 docs/*.[1-9] \ + | sed -E 's/.*\.([1-9])$$/\1/' | uniq | sort); \ + for section in $$sections; do \ + mkdir -p $(DESTDIR)$(MANDIR)/man$$section; \ + for manpage in $(DOCS_DIR)/*.$$section; do \ + gzip < $$manpage \ + > $(DESTDIR)$(MANDIR)/man$$section/$$(basename $$manpage).gz; \ done; \ done - if test "$(DOC_INSTALL_DIR)"; then \ - cp $(DOCS_DIR)/rabbitmq.config.example $(DOC_INSTALL_DIR)/rabbitmq.config.example; \ - fi - -install_dirs: - @ OK=true && \ - { [ -n "$(TARGET_DIR)" ] || { echo "Please set TARGET_DIR."; OK=false; }; } && \ - { [ -n "$(SBIN_DIR)" ] || { echo "Please set SBIN_DIR."; OK=false; }; } && \ - { [ -n "$(MAN_DIR)" ] || { echo "Please set MAN_DIR."; OK=false; }; } && $$OK - - mkdir -p $(TARGET_DIR)/sbin - mkdir -p $(SBIN_DIR) - mkdir -p $(MAN_DIR) - if test "$(DOC_INSTALL_DIR)"; then \ - mkdir -p $(DOC_INSTALL_DIR); \ - fi -$(foreach XML,$(USAGES_XML),$(eval $(call usage_dep, $(XML)))) +install-windows: install-windows-erlapp install-windows-scripts install-windows-docs + +install-windows-erlapp: dist + $(verbose) mkdir -p $(DESTDIR)$(WINDOWS_PREFIX) + $(inst_verbose) cp -r include ebin plugins LICENSE* INSTALL \ + $(DESTDIR)$(WINDOWS_PREFIX) + $(verbose) echo "Put your EZs here and use rabbitmq-plugins.bat to enable them." 
\ + > $(DESTDIR)$(WINDOWS_PREFIX)/plugins/README.txt + $(verbose) $(UNIX_TO_DOS) $(DESTDIR)$(WINDOWS_PREFIX)/plugins/README.txt + +# rabbitmq-common provides headers too: copy them to +# rabbitmq_server/include. + $(verbose) cp -r $(DEPS_DIR)/rabbit_common/include $(DESTDIR)$(WINDOWS_PREFIX) + +install-windows-scripts: + $(verbose) mkdir -p $(DESTDIR)$(WINDOWS_PREFIX)/sbin + $(inst_verbose) for script in $(WINDOWS_SCRIPTS); do \ + cp "scripts/$$script" "$(DESTDIR)$(WINDOWS_PREFIX)/sbin"; \ + chmod 0755 "$(DESTDIR)$(WINDOWS_PREFIX)/sbin/$$script"; \ + done -# Note that all targets which depend on clean must have clean in their -# name. Also any target that doesn't depend on clean should not have -# clean in its name, unless you know that you don't need any of the -# automatic dependency generation for that target (e.g. cleandb). +install-windows-docs: install-windows-erlapp + $(verbose) mkdir -p $(DESTDIR)$(WINDOWS_PREFIX)/etc + $(inst_verbose) xmlto -o . xhtml-nochunks docs/rabbitmq-service.xml + $(verbose) elinks -dump -no-references -no-numbering rabbitmq-service.html \ + > $(DESTDIR)$(WINDOWS_PREFIX)/readme-service.txt + $(verbose) rm rabbitmq-service.html + $(verbose) cp docs/rabbitmq.config.example $(DESTDIR)$(WINDOWS_PREFIX)/etc + $(verbose) for file in $(DESTDIR)$(WINDOWS_PREFIX)/readme-service.txt \ + $(DESTDIR)$(WINDOWS_PREFIX)/LICENSE* $(DESTDIR)$(WINDOWS_PREFIX)/INSTALL \ + $(DESTDIR)$(WINDOWS_PREFIX)/etc/rabbitmq.config.example; do \ + $(UNIX_TO_DOS) "$$file"; \ + case "$$file" in \ + *.txt) ;; \ + *.example) ;; \ + *) mv "$$file" "$$file.txt" ;; \ + esac; \ + done -# We want to load the dep file if *any* target *doesn't* contain -# "clean" - i.e. if removing all clean-like targets leaves something. +# -------------------------------------------------------------------- +# Packaging. 
+# -------------------------------------------------------------------- -ifeq "$(MAKECMDGOALS)" "" -TESTABLEGOALS:=$(.DEFAULT_GOAL) -else -TESTABLEGOALS:=$(MAKECMDGOALS) -endif +.PHONY: packages package-deb \ + package-rpm package-rpm-fedora package-rpm-suse \ + package-windows package-standalone-macosx \ + package-generic-unix -ifneq "$(strip $(patsubst clean%,,$(patsubst %clean,,$(TESTABLEGOALS))))" "" -include $(DEPS_FILE) -endif +# This variable is exported so sub-make instances know where to find the +# archive. +PACKAGES_SOURCE_DIST_FILE ?= $(firstword $(SOURCE_DIST_FILES)) -.PHONY: run-qc +packages package-deb package-rpm package-rpm-fedora \ +package-rpm-suse package-windows package-standalone-macosx \ +package-generic-unix: $(PACKAGES_SOURCE_DIST_FILE) + $(verbose) $(MAKE) -C packaging $@ \ + SOURCE_DIST_FILE=$(abspath $(PACKAGES_SOURCE_DIST_FILE)) diff --git a/rabbitmq-server/README b/rabbitmq-server/README index 67e3a66..43bfe00 100644 --- a/rabbitmq-server/README +++ b/rabbitmq-server/README @@ -1 +1 @@ -Please see http://www.rabbitmq.com/build-server.html for build instructions. +See http://rabbitmq.com and https://github.com/rabbitmq/rabbitmq-server. diff --git a/rabbitmq-server/README.md b/rabbitmq-server/README.md new file mode 100644 index 0000000..d64ab34 --- /dev/null +++ b/rabbitmq-server/README.md @@ -0,0 +1,47 @@ +# RabbitMQ Server + +[RabbitMQ](http://rabbitmq.com) is a [feature rich](http://www.rabbitmq.com/features.html), multi-protocol messaging broker. 
It supports: + + * AMQP 0-9-1 + * STOMP 1.0 through 1.2 + * MQTT 3.1.1 + * AMQP 1.0 + + +## Installation + + * [Installation guides](http://www.rabbitmq.com/download.html) for various platforms + + +## Tutorials & Documentation + + * [RabbitMQ tutorials](http://www.rabbitmq.com/getstarted.html) + * [Documentation guides](http://www.rabbitmq.com/documentation.html) + * [Client libraries and tools](http://www.rabbitmq.com/devtools.html) + + +## Getting Help + + * [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users) + * `#rabbitmq` on Freenode + * [Commercial RabbitMQ support](http://www.rabbitmq.com/services.html) from [Pivotal](http://pivotal.io) + + +## Contributing + +See [CONTRIBUTING.md](./CONTRIBUTING.md) and our [development process overview](http://www.rabbitmq.com/github.html). + + +## License + +RabbitMQ server is [licensed under the MPL](LICENSE-MPL-RabbitMQ). + + +## Building From Source + +See [building RabbitMQ server from source](http://www.rabbitmq.com/build-server.html). + + +## Copyright + +(c) Pivotal Software Inc., 2007-2015. diff --git a/rabbitmq-server/build.config b/rabbitmq-server/build.config new file mode 100644 index 0000000..b143068 --- /dev/null +++ b/rabbitmq-server/build.config @@ -0,0 +1,43 @@ +# Do *not* comment or remove core modules +# unless you know what you are doing. +# +# Feel free to comment plugins out however. + +# Core modules. +core/core +index/* +core/index +core/deps + +# Plugins that must run before Erlang code gets compiled. +plugins/erlydtl +plugins/protobuffs + +# Core modules, continued. +core/erlc +core/docs +core/rel +core/test +core/compat + +# Plugins. +plugins/asciidoc +plugins/bootstrap +plugins/c_src +plugins/ci +plugins/ct +plugins/dialyzer +# plugins/edoc +plugins/elvis +plugins/escript +plugins/eunit +plugins/relx +plugins/shell +plugins/triq +plugins/xref + +# Plugins enhancing the functionality of other plugins. 
+plugins/cover + +# Core modules which can use variables from plugins. +core/deps-tools diff --git a/rabbitmq-server/calculate-relative b/rabbitmq-server/calculate-relative deleted file mode 100755 index 3af18e8..0000000 --- a/rabbitmq-server/calculate-relative +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python -# -# relpath.py -# R.Barran 30/08/2004 -# Retrieved from http://code.activestate.com/recipes/302594/ - -import os -import sys - -def relpath(target, base=os.curdir): - """ - Return a relative path to the target from either the current dir or an optional base dir. - Base can be a directory specified either as absolute or relative to current dir. - """ - - if not os.path.exists(target): - raise OSError, 'Target does not exist: '+target - - if not os.path.isdir(base): - raise OSError, 'Base is not a directory or does not exist: '+base - - base_list = (os.path.abspath(base)).split(os.sep) - target_list = (os.path.abspath(target)).split(os.sep) - - # On the windows platform the target may be on a completely different drive from the base. - if os.name in ['nt','dos','os2'] and base_list[0] <> target_list[0]: - raise OSError, 'Target is on a different drive to base. Target: '+target_list[0].upper()+', base: '+base_list[0].upper() - - # Starting from the filepath root, work out how much of the filepath is - # shared by base and target. - for i in range(min(len(base_list), len(target_list))): - if base_list[i] <> target_list[i]: break - else: - # If we broke out of the loop, i is pointing to the first differing path elements. - # If we didn't break out of the loop, i is pointing to identical path elements. - # Increment i so that in all cases it points to the first differing path elements. - i+=1 - - rel_list = [os.pardir] * (len(base_list)-i) + target_list[i:] - if (len(rel_list) == 0): - return "." 
- return os.path.join(*rel_list) - -if __name__ == "__main__": - print(relpath(sys.argv[1], sys.argv[2])) diff --git a/rabbitmq-server/check_xref b/rabbitmq-server/check_xref new file mode 100755 index 0000000..78f932d --- /dev/null +++ b/rabbitmq-server/check_xref @@ -0,0 +1,291 @@ +#!/usr/bin/env escript +%% -*- erlang -*- +-mode(compile). + +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and +%% limitations under the License. +%% +%% The Original Code is RabbitMQ. +%% +%% The Initial Developer of the Original Code is Pivotal Software, Inc. +%% Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. +%% + +main(["-h"]) -> + io:format("usage: check_xref PluginDirectory (options)~n" + "options:~n" + " -q - quiet mode (only prints errors)~n" + " -X - disables all filters~n"); +main([PluginsDir|Argv]) -> + put({?MODULE, quiet}, lists:member("-q", Argv)), + put({?MODULE, no_filters}, lists:member("-X", Argv)), + + {ok, Cwd} = file:get_cwd(), + code:add_pathz(filename:join(Cwd, "ebin")), + LibDir = filename:join(Cwd, "lib"), + case filelib:is_dir(LibDir) of + false -> ok; + true -> os:cmd("rm -rf " ++ LibDir) + end, + Rc = try + check(Cwd, PluginsDir, LibDir, checks()) + catch + _:Err -> + io:format(user, "failed: ~p~n", [Err]), + 1 + end, + shutdown(Rc, LibDir). + +shutdown(Rc, LibDir) -> + os:cmd("rm -rf " ++ LibDir), + erlang:halt(Rc). 
+ +check(Cwd, PluginsDir, LibDir, Checks) -> + {ok, Plugins} = file:list_dir(PluginsDir), + ok = file:make_dir(LibDir), + put({?MODULE, third_party}, []), + [begin + Source = filename:join(PluginsDir, Plugin), + Target = filename:join(LibDir, Plugin), + IsExternal = external_dependency(Plugin), + AppN = case IsExternal of + true -> filename:join(LibDir, unmangle_name(Plugin)); + false -> filename:join( + LibDir, filename:basename(Plugin, ".ez")) + end, + + report(info, "mkdir -p ~s~n", [Target]), + filelib:ensure_dir(Target), + + report(info, "cp ~s ~s~n", [Source, Target]), + {ok, _} = file:copy(Source, Target), + + report(info, "unzip -d ~s ~s~n", [LibDir, Target]), + {ok, _} = zip:unzip(Target, [{cwd, LibDir}]), + + UnpackDir = filename:join(LibDir, filename:basename(Target, ".ez")), + report(info, "mv ~s ~s~n", [UnpackDir, AppN]), + ok = file:rename(UnpackDir, AppN), + + code:add_patha(filename:join(AppN, "ebin")), + case IsExternal of + true -> App = list_to_atom(hd(string:tokens(filename:basename(AppN), + "-"))), + report(info, "loading ~p~n", [App]), + application:load(App), + store_third_party(App); + _ -> ok + end + end || Plugin <- Plugins, + lists:suffix(".ez", Plugin)], + + RabbitAppEbin = filename:join([LibDir, "rabbit", "ebin"]), + filelib:ensure_dir(filename:join(RabbitAppEbin, "foo")), + {ok, Beams} = file:list_dir("ebin"), + [{ok, _} = file:copy(filename:join("ebin", Beam), + filename:join(RabbitAppEbin, Beam)) || Beam <- Beams], + xref:start(?MODULE), + xref:set_default(?MODULE, [{verbose, false}, {warnings, false}]), + xref:set_library_path(?MODULE, code:get_path()), + xref:add_release(?MODULE, Cwd, {name, rabbit}), + store_unresolved_calls(), + Results = lists:flatten([perform_analysis(Q) || Q <- Checks]), + report(Results). 
+ +%% +%% Analysis +%% + +perform_analysis({Query, Description, Severity}) -> + perform_analysis({Query, Description, Severity, fun(_) -> false end}); +perform_analysis({Query, Description, Severity, Filter}) -> + report_progress("Checking whether any code ~s " + "(~s)~n", [Description, Query]), + case analyse(Query) of + {ok, Analysis} -> + [filter(Result, Filter) || + Result <- process_analysis(Query, Description, + Severity, Analysis)]; + {error, Module, Reason} -> + {analysis_error, {Module, Reason}} + end. + +partition(Results) -> + lists:partition(fun({{_, L}, _}) -> L =:= error end, Results). + +analyse(Query) when is_atom(Query) -> + xref:analyse(?MODULE, Query, [{verbose, false}]); +analyse(Query) when is_list(Query) -> + xref:q(?MODULE, Query). + +process_analysis(Query, Tag, Severity, Analysis) when is_atom(Query) -> + [{{Tag, Severity}, MFA} || MFA <- Analysis]; +process_analysis(Query, Tag, Severity, Analysis) when is_list(Query) -> + [{{Tag, Severity}, Result} || Result <- Analysis]. + +checks() -> + [{"(XXL)(Lin) ((XC - UC) || (XU - X - B))", + "has call to undefined function(s)", + error, filters()}, + {"(Lin) (L - LU)", + "has unused local function(s)", + error, filters()}, + {"(E | \"(rabbit|amqp).*\":_/_ || \"gen_server2?\":call/2)", + "has 5 sec timeout in", + error, filters()}, + {"(Lin) (LU * (X - XU))", + "has exported function(s) only used locally", + warning, filters()}, + {"(Lin) (DF * (XU + LU))", "used deprecated function(s)", + warning, filters()}]. +%% {"(Lin) (X - XU)", "possibly unused export", +%% warning, fun filter_unused/1}]. + +%% +%% noise filters (can be disabled with -X) - strip uninteresting analyses +%% + +filter(Result, Filter) -> + case Filter(Result) of + false -> Result; + true -> [] %% NB: this gets flattened out later on.... + end. 
+ +filters() -> + case get({?MODULE, no_filters}) of + true -> fun(_) -> false end; + _ -> filter_chain([fun is_unresolved_call/1, fun is_callback/1, + fun is_unused/1, fun is_irrelevant/1]) + end. + +filter_chain(FnChain) -> + fun(AnalysisResult) -> + Result = cleanup(AnalysisResult), + lists:foldl(fun(F, false) -> F(Result); + (_F, true) -> true + end, false, FnChain) + end. + +cleanup({{_, _},{{{{_,_,_}=MFA1,_},{{_,_,_}=MFA2,_}},_}}) -> {MFA1, MFA2}; +cleanup({{_, _},{{{_,_,_}=MFA1,_},{{_,_,_}=MFA2,_}}}) -> {MFA1, MFA2}; +cleanup({{_, _},{{_,_,_}=MFA1,{_,_,_}=MFA2},_}) -> {MFA1, MFA2}; +cleanup({{_, _},{{_,_,_}=MFA1,{_,_,_}=MFA2}}) -> {MFA1, MFA2}; +cleanup({{_, _}, {_,_,_}=MFA}) -> MFA; +cleanup({{_, _}, {{_,_,_}=MFA,_}}) -> MFA; +cleanup({{_,_,_}=MFA, {_,_,_}}) -> MFA; +cleanup({{_,_,_}=MFA, {_,_,_},_}) -> MFA; +cleanup(Other) -> Other. + +is_irrelevant({{M,_,_}, {_,_,_}}) -> + is_irrelevant(M); +is_irrelevant({M,_,_}) -> + is_irrelevant(M); +is_irrelevant(Mod) when is_atom(Mod) -> + lists:member(Mod, get({?MODULE, third_party})). + +is_unused({{_,_,_}=MFA, {_,_,_}}) -> + is_unused(MFA); +is_unused({M,_F,_A}) -> + lists:suffix("_tests", atom_to_list(M)); +is_unused(_) -> + false. + +is_unresolved_call({_, F, A}) -> + UC = get({?MODULE, unresolved_calls}), + sets:is_element({'$M_EXPR', F, A}, UC); +is_unresolved_call(_) -> + false. + +%% TODO: cache this.... +is_callback({M,_,_}=MFA) -> + Attributes = M:module_info(attributes), + Behaviours = proplists:append_values(behaviour, Attributes), + {_, Callbacks} = lists:foldl(fun acc_behaviours/2, {M, []}, Behaviours), + lists:member(MFA, Callbacks); +is_callback(_) -> + false. + +acc_behaviours(B, {M, CB}=Acc) -> + case catch(B:behaviour_info(callbacks)) of + [{_,_} | _] = Callbacks -> + {M, CB ++ [{M, F, A} || {F,A} <- Callbacks]}; + _ -> + Acc + end. 
+ +%% +%% reporting/output +%% + +report(Results) -> + [report_failures(F) || F <- Results], + {Errors, Warnings} = partition(Results), + report(info, "Completed: ~p errors, ~p warnings~n", + [length(Errors), length(Warnings)]), + case length(Errors) > 0 of + true -> 1; + false -> 0 + end. + +report_failures({analysis_error, {Mod, Reason}}) -> + report(error, "~s:0 Analysis Error: ~p~n", [source_file(Mod), Reason]); +report_failures({{Tag, Level}, {{{{M,_,_},L},{{M2,F2,A2},_}},_}}) -> + report(Level, "~s:~w ~s ~p:~p/~p~n", + [source_file(M), L, Tag, M2, F2, A2]); +report_failures({{Tag, Level}, {{M,F,A},L}}) -> + report(Level, "~s:~w ~s ~p:~p/~p~n", [source_file(M), L, Tag, M, F, A]); +report_failures({{Tag, Level}, {M,F,A}}) -> + report(Level, "~s:unknown ~s ~p:~p/~p~n", [source_file(M), Tag, M, F, A]); +report_failures(Term) -> + report(error, "Ignoring ~p~n", [Term]), + ok. + +report_progress(Fmt, Args) -> + report(info, Fmt, Args). + +report(Level, Fmt, Args) -> + case {get({?MODULE, quiet}), Level} of + {true, error} -> do_report(lookup_prefix(Level), Fmt, Args); + {false, _} -> do_report(lookup_prefix(Level), Fmt, Args); + _ -> ok + end. + +do_report(Prefix, Fmt, Args) -> + io:format(Prefix ++ Fmt, Args). + +lookup_prefix(error) -> "ERROR: "; +lookup_prefix(warning) -> "WARNING: "; +lookup_prefix(info) -> "INFO: ". + +source_file(M) -> + proplists:get_value(source, M:module_info(compile)). + +%% +%% setup/code-path/file-system ops +%% + +store_third_party(App) -> + {ok, AppConfig} = application:get_all_key(App), + AppModules = proplists:get_value(modules, AppConfig), + put({?MODULE, third_party}, AppModules ++ get({?MODULE, third_party})). + +%% TODO: this ought not to be maintained in such a fashion +external_dependency(Path) -> + lists:any(fun(P) -> lists:prefix(P, Path) end, + ["mochiweb", "webmachine", "rfc4627", "eldap"]). + +unmangle_name(Path) -> + [Name, Vsn | _] = re:split(Path, "-", [{return, list}]), + string:join([Name, Vsn], "-"). 
+ +store_unresolved_calls() -> + {ok, UCFull} = analyse("UC"), + UC = [MFA || {_, {_,_,_} = MFA} <- UCFull], + put({?MODULE, unresolved_calls}, sets:from_list(UC)). diff --git a/rabbitmq-server/codegen/license_info b/rabbitmq-server/codegen/license_info deleted file mode 100644 index 1cebe90..0000000 --- a/rabbitmq-server/codegen/license_info +++ /dev/null @@ -1,4 +0,0 @@ -The files amqp-rabbitmq-0.8.json and amqp-rabbitmq-0.9.1.json are -"Copyright (C) 2008-2013 GoPivotal", Inc. and are covered by the MIT -license. - diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/CONTRIBUTING.md b/rabbitmq-server/deps/amqp_client/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/cowboy-wrapper/CONTRIBUTING.md rename to rabbitmq-server/deps/amqp_client/CONTRIBUTING.md diff --git a/rabbitmq-server/deps/amqp_client/Makefile b/rabbitmq-server/deps/amqp_client/Makefile new file mode 100644 index 0000000..f18807e --- /dev/null +++ b/rabbitmq-server/deps/amqp_client/Makefile @@ -0,0 +1,167 @@ +PROJECT = amqp_client +VERSION ?= $(call get_app_version,src/$(PROJECT).app.src) +ifeq ($(VERSION),) +VERSION = 0.0.0 +endif + +# Release artifacts are put in $(PACKAGES_DIR). +PACKAGES_DIR ?= $(abspath PACKAGES) + +TEST_DEPS = rabbit + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk + +# -------------------------------------------------------------------- +# Tests. +# -------------------------------------------------------------------- + +include test.mk + +tests:: all_tests + +# -------------------------------------------------------------------- +# Distribution. 
+# -------------------------------------------------------------------- + +.PHONY: distribution + +distribution: docs source-dist package + +edoc: doc/overview.edoc + +doc/overview.edoc: src/overview.edoc.in + mkdir -p doc + sed -e 's:%%VERSION%%:$(VERSION):g' < $< > $@ + +.PHONY: source-dist clean-source-dist + +SOURCE_DIST_BASE ?= $(PROJECT) +SOURCE_DIST_SUFFIXES ?= tar.xz zip +SOURCE_DIST ?= $(PACKAGES_DIR)/$(SOURCE_DIST_BASE)-$(VERSION)-src + +# The first source distribution file is used by packages: if the archive +# type changes, you must update all packages' Makefile. +SOURCE_DIST_FILES = $(addprefix $(SOURCE_DIST).,$(SOURCE_DIST_SUFFIXES)) + +.PHONY: $(SOURCE_DIST_FILES) + +source-dist: $(SOURCE_DIST_FILES) + @: + +RSYNC ?= rsync +RSYNC_V_0 = +RSYNC_V_1 = -v +RSYNC_V_2 = -v +RSYNC_V = $(RSYNC_V_$(V)) +RSYNC_FLAGS += -a $(RSYNC_V) \ + --exclude '.sw?' --exclude '.*.sw?' \ + --exclude '*.beam' \ + --exclude '*.pyc' \ + --exclude '.git*' \ + --exclude '.hg*' \ + --exclude '.travis.yml' \ + --exclude '$(notdir $(ERLANG_MK_TMP))' \ + --exclude 'ebin' \ + --exclude 'erl_crash.dump' \ + --exclude 'deps/' \ + --exclude '$(notdir $(DEPS_DIR))/' \ + --exclude 'doc/' \ + --exclude 'plugins/' \ + --exclude '$(notdir $(DIST_DIR))/' \ + --exclude '/$(notdir $(PACKAGES_DIR))/' \ + --delete \ + --delete-excluded + +TAR ?= tar +TAR_V_0 = +TAR_V_1 = -v +TAR_V_2 = -v +TAR_V = $(TAR_V_$(V)) + +GZIP ?= gzip +BZIP2 ?= bzip2 +XZ ?= xz + +ZIP ?= zip +ZIP_V_0 = -q +ZIP_V_1 = +ZIP_V_2 = +ZIP_V = $(ZIP_V_$(V)) + +.PHONY: $(SOURCE_DIST) + +$(SOURCE_DIST): $(ERLANG_MK_RECURSIVE_DEPS_LIST) + $(verbose) mkdir -p $(dir $@) + $(gen_verbose) $(RSYNC) $(RSYNC_FLAGS) ./ $@/ + $(verbose) sed -E -i.bak \ + -e 's/[{]vsn[[:blank:]]*,[^}]+}/{vsn, "$(VERSION)"}/' \ + $@/src/$(PROJECT).app.src && \ + rm $@/src/$(PROJECT).app.src.bak + $(verbose) for dep in $$(cat $(ERLANG_MK_RECURSIVE_DEPS_LIST) | grep -v '/$(PROJECT)$$' | LC_COLLATE=C sort); do \ + $(RSYNC) $(RSYNC_FLAGS) \ + $$dep \ + $@/deps; \ + 
if test -f $@/deps/$$(basename $$dep)/erlang.mk && \ + test "$$(wc -l $@/deps/$$(basename $$dep)/erlang.mk | awk '{print $$1;}')" = "1" && \ + grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" $@/deps/$$(basename $$dep)/erlang.mk; then \ + echo "include ../../erlang.mk" > $@/deps/$$(basename $$dep)/erlang.mk; \ + fi; \ + sed -E -i.bak "s|^[[:blank:]]*include[[:blank:]]+\.\./.*erlang.mk$$|include ../../erlang.mk|" \ + $@/deps/$$(basename $$dep)/Makefile && \ + rm $@/deps/$$(basename $$dep)/Makefile.bak; \ + done + $(verbose) for file in $$(find $@ -name '*.app.src'); do \ + sed -E -i.bak -e 's/[{]vsn[[:blank:]]*,[[:blank:]]*""[[:blank:]]*}/{vsn, "$(VERSION)"}/' $$file; \ + rm $$file.bak; \ + done + $(verbose) echo "$(PROJECT) $$(git rev-parse HEAD) $$(git describe --tags --exact-match 2>/dev/null || git symbolic-ref -q --short HEAD)" > $@/git-revisions.txt + $(verbose) for dep in $$(cat $(ERLANG_MK_RECURSIVE_DEPS_LIST)); do \ + (cd $$dep; echo "$$(basename "$$dep") $$(git rev-parse HEAD) $$(git describe --tags --exact-match 2>/dev/null || git symbolic-ref -q --short HEAD)") >> $@/git-revisions.txt; \ + done + $(verbose) rm $@/README.in + $(verbose) cp README.in $@/README + $(verbose) cat "$(BUILD_DOC)" >> $@/README + +# TODO: Fix file timestamps to have reproducible source archives. 
+# $(verbose) find $@ -not -name 'git-revisions.txt' -print0 | xargs -0 touch -r $@/git-revisions.txt + +$(SOURCE_DIST).tar.gz: $(SOURCE_DIST) + $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \ + find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \ + xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \ + $(GZIP) --best > $@ + +$(SOURCE_DIST).tar.bz2: $(SOURCE_DIST) + $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \ + find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \ + xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \ + $(BZIP2) > $@ + +$(SOURCE_DIST).tar.xz: $(SOURCE_DIST) + $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \ + find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \ + xargs -0 $(TAR) $(TAR_V) --no-recursion -cf - | \ + $(XZ) > $@ + +$(SOURCE_DIST).zip: $(SOURCE_DIST) + $(verbose) rm -f $@ + $(gen_verbose) cd $(dir $(SOURCE_DIST)) && \ + find $(notdir $(SOURCE_DIST)) -print0 | LC_COLLATE=C sort -z | \ + xargs -0 $(ZIP) $(ZIP_V) $@ + +clean:: clean-source-dist + +clean-source-dist: + $(gen_verbose) rm -rf -- $(SOURCE_DIST_BASE)-* + +package: dist + cp $(DIST_DIR)/*.ez $(PACKAGES_DIR) diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/README.in b/rabbitmq-server/deps/amqp_client/README.in similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/README.in rename to rabbitmq-server/deps/amqp_client/README.in diff --git a/rabbitmq-server/deps/amqp_client/build.config b/rabbitmq-server/deps/amqp_client/build.config new file mode 100644 index 0000000..8f32456 --- /dev/null +++ b/rabbitmq-server/deps/amqp_client/build.config @@ -0,0 +1,43 @@ +# Do *not* comment or remove core modules +# unless you know what you are doing. +# +# Feel free to comment plugins out however. + +# Core modules. +core/core +index/* +core/index +core/deps + +# Plugins that must run before Erlang code gets compiled. +plugins/erlydtl +plugins/protobuffs + +# Core modules, continued. 
+core/erlc +core/docs +core/rel +core/test +core/compat + +# Plugins. +plugins/asciidoc +plugins/bootstrap +plugins/c_src +plugins/ci +# plugins/ct +plugins/dialyzer +plugins/edoc +plugins/elvis +plugins/escript +# plugins/eunit +plugins/relx +plugins/shell +plugins/triq +plugins/xref + +# Plugins enhancing the functionality of other plugins. +plugins/cover + +# Core modules which can use variables from plugins. +core/deps-tools diff --git a/rabbitmq-server/deps/amqp_client/erlang.mk b/rabbitmq-server/deps/amqp_client/erlang.mk new file mode 100644 index 0000000..de8be5b --- /dev/null +++ b/rabbitmq-server/deps/amqp_client/erlang.mk @@ -0,0 +1,6533 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c-dirty + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. 
+ +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. + +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... 
Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). 
+endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. 
+ +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch 
= git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. 
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += 
bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simplify writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Er lang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. 
+ +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. + +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. 
+ +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. + +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. 
+ +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. +endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' > $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. 
+ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. +EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_client.hrl b/rabbitmq-server/deps/amqp_client/include/amqp_client.hrl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_client.hrl rename to rabbitmq-server/deps/amqp_client/include/amqp_client.hrl index bc74dd7..58ccc49 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_client.hrl +++ b/rabbitmq-server/deps/amqp_client/include/amqp_client.hrl @@ -29,7 +29,7 @@ port = undefined, channel_max = 0, frame_max = 0, - heartbeat = 0, + heartbeat = 10, connection_timeout = infinity, ssl_options = none, auth_mechanisms = diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_client_internal.hrl b/rabbitmq-server/deps/amqp_client/include/amqp_client_internal.hrl similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_client_internal.hrl rename to rabbitmq-server/deps/amqp_client/include/amqp_client_internal.hrl diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_gen_consumer_spec.hrl b/rabbitmq-server/deps/amqp_client/include/amqp_gen_consumer_spec.hrl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_gen_consumer_spec.hrl rename to rabbitmq-server/deps/amqp_client/include/amqp_gen_consumer_spec.hrl index fbaa28c..2ccf681 100644 --- 
a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/amqp_gen_consumer_spec.hrl +++ b/rabbitmq-server/deps/amqp_client/include/amqp_gen_consumer_spec.hrl @@ -16,7 +16,7 @@ -include("amqp_client.hrl"). --ifndef(edoc). +-ifdef(use_specs). -type(state() :: any()). -type(consume() :: #'basic.consume'{}). -type(consume_ok() :: #'basic.consume_ok'{}). diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/rabbit_routing_prefixes.hrl b/rabbitmq-server/deps/amqp_client/include/rabbit_routing_prefixes.hrl similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/include/rabbit_routing_prefixes.hrl rename to rabbitmq-server/deps/amqp_client/include/rabbit_routing_prefixes.hrl diff --git a/rabbitmq-server/deps/amqp_client/rabbitmq-components.mk b/rabbitmq-server/deps/amqp_client/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/amqp_client/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. +ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. 
If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. + +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq 
rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq 
rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples 
\ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. +NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. 
If this is not a Git clone, +# default to the upstream Git repository. +ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. 
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. 
That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. +# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) 
+ +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . +ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_auth_mechanisms.erl b/rabbitmq-server/deps/amqp_client/src/amqp_auth_mechanisms.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_auth_mechanisms.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_auth_mechanisms.erl index 9192cad..5517811 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_auth_mechanisms.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_auth_mechanisms.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. 
%% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel.erl b/rabbitmq-server/deps/amqp_client/src/amqp_channel.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_channel.erl index 1121795..fd1e631 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_channel.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @type close_reason(Type) = {shutdown, amqp_reason(Type)}. @@ -560,7 +560,9 @@ handle_method_to_server(Method, AmqpMsg, From, Sender, Flow, check_block(Method, AmqpMsg, State)} of {ok, _, ok} -> State1 = case {Method, State#state.next_pub_seqno} of - {#'confirm.select'{}, _} -> + {#'confirm.select'{}, 0} -> + %% The confirm seqno is set to 1 on the + %% first confirm.select only. State#state{next_pub_seqno = 1}; {#'basic.publish'{}, 0} -> State; diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup.erl b/rabbitmq-server/deps/amqp_client/src/amqp_channel_sup.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_channel_sup.erl index 8fc4d8f..230d040 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_channel_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. 
%% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup_sup.erl b/rabbitmq-server/deps/amqp_client/src/amqp_channel_sup_sup.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup_sup.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_channel_sup_sup.erl index 6d461e1..a206c9f 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channel_sup_sup.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_channel_sup_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channels_manager.erl b/rabbitmq-server/deps/amqp_client/src/amqp_channels_manager.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channels_manager.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_channels_manager.erl index b260cd2..ceb223a 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_channels_manager.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_channels_manager.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. 
%% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/ebin/amqp_client.app.in b/rabbitmq-server/deps/amqp_client/src/amqp_client.app.src similarity index 72% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/ebin/amqp_client.app.in rename to rabbitmq-server/deps/amqp_client/src/amqp_client.app.src index c9c96ad..56c1ee0 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/ebin/amqp_client.app.in +++ b/rabbitmq-server/deps/amqp_client/src/amqp_client.app.src @@ -1,9 +1,9 @@ {application, amqp_client, [{description, "RabbitMQ AMQP Client"}, - {vsn, "%%VSN%%"}, + {vsn, "3.6.1"}, {modules, []}, {registered, [amqp_sup]}, {env, [{prefer_ipv6, false}, {ssl_options, []}]}, {mod, {amqp_client, []}}, - {applications, [kernel, stdlib, xmerl]}]}. + {applications, [kernel, stdlib, xmerl, rabbit_common]}]}. diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_client.erl b/rabbitmq-server/deps/amqp_client/src/amqp_client.erl similarity index 92% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_client.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_client.erl index 83905d0..0e0d141 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_client.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_client.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @private @@ -27,6 +27,7 @@ %%--------------------------------------------------------------------------- start() -> + application:start(rabbit_common), application:start(amqp_client). 
%%--------------------------------------------------------------------------- diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection.erl b/rabbitmq-server/deps/amqp_client/src/amqp_connection.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_connection.erl index 371b225..148d302 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_connection.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @type close_reason(Type) = {shutdown, amqp_reason(Type)}. @@ -168,7 +168,7 @@ start(AmqpParams) -> %% application controller is in the process of shutting down the very %% application which is making this call. ensure_started() -> - [ensure_started(App) || App <- [xmerl, amqp_client]]. + [ensure_started(App) || App <- [xmerl, rabbit_common, amqp_client]]. ensure_started(App) -> case application_controller:get_master(App) of diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_sup.erl b/rabbitmq-server/deps/amqp_client/src/amqp_connection_sup.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_sup.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_connection_sup.erl index 7bc8a2d..be9da63 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_sup.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_connection_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. 
+%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_type_sup.erl b/rabbitmq-server/deps/amqp_client/src/amqp_connection_type_sup.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_type_sup.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_connection_type_sup.erl index 5802375..9aeb311 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_connection_type_sup.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_connection_type_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_connection.erl b/rabbitmq-server/deps/amqp_client/src/amqp_direct_connection.erl similarity index 94% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_connection.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_direct_connection.erl index 5cd7df7..52d5fa7 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_connection.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_direct_connection.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. 
%% %% @private @@ -130,7 +130,8 @@ connect(Params = #amqp_params_direct{username = Username, vhost = VHost, params = Params, adapter_info = ensure_adapter_info(Info), - connected_at = rabbit_misc:now_to_ms(os:timestamp())}, + connected_at = + time_compat:os_system_time(milli_seconds)}, case rpc:call(Node, rabbit_direct, connect, [{Username, Password}, VHost, ?PROTOCOL, self(), connection_info(State1)]) of @@ -192,9 +193,10 @@ maybe_ssl_info(Sock) -> ssl_info(Sock) -> {Protocol, KeyExchange, Cipher, Hash} = case rabbit_net:ssl_info(Sock) of - {ok, {P, {K, C, H}}} -> {P, K, C, H}; - {ok, {P, {K, C, H, _}}} -> {P, K, C, H}; - _ -> {unknown, unknown, unknown, unknown} + {ok, Infos} -> {_, P} = lists:keyfind(protocol, 1, Infos), + {_, {K, C, H}} = lists:keyfind(cipher_suite, 1, Infos), + {P, K, C, H}; + _ -> {unknown, unknown, unknown, unknown} end, [{ssl_protocol, Protocol}, {ssl_key_exchange, KeyExchange}, diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_consumer.erl b/rabbitmq-server/deps/amqp_client/src/amqp_direct_consumer.erl similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_direct_consumer.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_direct_consumer.erl diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_gen_connection.erl b/rabbitmq-server/deps/amqp_client/src/amqp_gen_connection.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_gen_connection.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_gen_connection.erl index 55618ac..2829248 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_gen_connection.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_gen_connection.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. 
+%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_gen_consumer.erl b/rabbitmq-server/deps/amqp_client/src/amqp_gen_consumer.erl similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_gen_consumer.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_gen_consumer.erl diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_main_reader.erl b/rabbitmq-server/deps/amqp_client/src/amqp_main_reader.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_main_reader.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_main_reader.erl index b8e4ff9..33a6947 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_main_reader.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_main_reader.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_network_connection.erl b/rabbitmq-server/deps/amqp_client/src/amqp_network_connection.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_network_connection.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_network_connection.erl index 5edb44b..91f2f89 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_network_connection.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_network_connection.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. 
All rights reserved. %% %% @private @@ -151,9 +151,8 @@ do_connect({Addr, Family}, orddict:from_list(SslOpts0)))), case ssl:connect(Sock, SslOpts) of {ok, SslSock} -> - RabbitSslSock = #ssl_socket{ssl = SslSock, tcp = Sock}, try_handshake(AmqpParams, SIF, - State#state{sock = RabbitSslSock}); + State#state{sock = SslSock}); {error, _} = E -> E end; @@ -304,7 +303,7 @@ client_properties(UserProperties) -> {<<"version">>, longstr, list_to_binary(Vsn)}, {<<"platform">>, longstr, <<"Erlang">>}, {<<"copyright">>, longstr, - <<"Copyright (c) 2007-2015 Pivotal Software, Inc.">>}, + <<"Copyright (c) 2007-2016 Pivotal Software, Inc.">>}, {<<"information">>, longstr, <<"Licensed under the MPL. " "See http://www.rabbitmq.com/">>}, @@ -323,6 +322,8 @@ handshake_recv(Expecting) -> Method; {'connection.tune', 'connection.close'} -> Method; + {'connection.open_ok', 'connection.close'} -> + exit(get_reason(Method)); {'connection.open_ok', _} -> {closing, #amqp_error{name = command_invalid, @@ -369,3 +370,6 @@ obtain() -> false -> ok; _ -> file_handle_cache:obtain() end. + +get_reason(#'connection.close'{reply_code = ErrCode}) -> + ?PROTOCOL:amqp_exception(ErrCode). diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_client.erl b/rabbitmq-server/deps/amqp_client/src/amqp_rpc_client.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_client.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_rpc_client.erl index c5bed0d..6fadba8 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_client.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_rpc_client.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. 
%% %% @doc This module allows the simple execution of an asynchronous RPC over diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_server.erl b/rabbitmq-server/deps/amqp_client/src/amqp_rpc_server.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_server.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_rpc_server.erl index 1452536..25be89e 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_rpc_server.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_rpc_server.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% @doc This is a utility module that is used to expose an arbitrary function diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_selective_consumer.erl b/rabbitmq-server/deps/amqp_client/src/amqp_selective_consumer.erl similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_selective_consumer.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_selective_consumer.erl diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_sup.erl b/rabbitmq-server/deps/amqp_client/src/amqp_sup.erl similarity index 95% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_sup.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_sup.erl index 9c928d5..c2fa89d 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_sup.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. 
%% %% @private diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_uri.erl b/rabbitmq-server/deps/amqp_client/src/amqp_uri.erl similarity index 92% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_uri.erl rename to rabbitmq-server/deps/amqp_client/src/amqp_uri.erl index 04446b0..95447ab 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/amqp_uri.erl +++ b/rabbitmq-server/deps/amqp_client/src/amqp_uri.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(amqp_uri). @@ -60,8 +60,8 @@ remove_credentials(URI) -> %% The extra parameters that may be specified are channel_max, %% frame_max, heartbeat and auth_mechanism (the latter can appear more %% than once). The extra parameters that may be specified for an SSL -%% connection are cacertfile, certfile, keyfile, verify, and -%% fail_if_no_peer_cert. +%% connection are cacertfile, certfile, keyfile, verify, +%% fail_if_no_peer_cert, password, and depth. parse(Uri) -> parse(Uri, <<"/">>). parse(Uri, DefaultVHost) -> @@ -89,6 +89,8 @@ parse1(_, _DefaultVHost) -> unescape_string(Atom) when is_atom(Atom) -> Atom; +unescape_string(Integer) when is_integer(Integer) -> + Integer; unescape_string([]) -> []; unescape_string([$%, N1, N2 | Rest]) -> @@ -170,7 +172,9 @@ build_ssl_broker(ParsedUri, DefaultVHost) -> {fun find_path_parameter/1, certfile}, {fun find_path_parameter/1, keyfile}, {fun find_atom_parameter/1, verify}, - {fun find_boolean_parameter/1, fail_if_no_peer_cert}]], + {fun find_boolean_parameter/1, fail_if_no_peer_cert}, + {fun find_identity_parameter/1, password}, + {fun find_integer_parameter/1, depth}]], []), Params#amqp_params_network{ssl_options = SSLOptions}. 
@@ -207,14 +211,23 @@ broker_add_query(Params, ParsedUri, Fields) -> parse_amqp_param(Field, String) when Field =:= channel_max orelse Field =:= frame_max orelse Field =:= heartbeat orelse - Field =:= connection_timeout -> - try return(list_to_integer(String)) - catch error:badarg -> fail({not_an_integer, String}) - end; + Field =:= connection_timeout orelse + Field =:= depth -> + find_integer_parameter(String); +parse_amqp_param(Field, String) when Field =:= password -> + find_identity_parameter(String); parse_amqp_param(Field, String) -> fail({parameter_unconfigurable_in_query, Field, String}). -find_path_parameter(Value) -> return(Value). +find_path_parameter(Value) -> + find_identity_parameter(Value). + +find_identity_parameter(Value) -> return(Value). + +find_integer_parameter(Value) -> + try return(list_to_integer(Value)) + catch error:badarg -> fail({not_an_integer, Value}) + end. find_boolean_parameter(Value) -> Bool = list_to_atom(Value), diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/overview.edoc.in b/rabbitmq-server/deps/amqp_client/src/overview.edoc.in similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/overview.edoc.in rename to rabbitmq-server/deps/amqp_client/src/overview.edoc.in diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/rabbit_routing_util.erl b/rabbitmq-server/deps/amqp_client/src/rabbit_routing_util.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/rabbit_routing_util.erl rename to rabbitmq-server/deps/amqp_client/src/rabbit_routing_util.erl index 7daa728..537dfc6 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/rabbit_routing_util.erl +++ b/rabbitmq-server/deps/amqp_client/src/rabbit_routing_util.erl @@ -98,7 +98,7 @@ ensure_endpoint(_Dir, _Channel, {queue, undefined}, _Params, State) -> {ok, undefined, State}; ensure_endpoint(_, Channel, {queue, Name}, Params, State) -> - Params1 = 
rabbit_misc:pset(durable, true, Params), + Params1 = rabbit_misc:pmerge(durable, true, Params), Queue = list_to_binary(Name), State1 = case sets:is_element(Queue, State) of true -> State; diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/uri_parser.erl b/rabbitmq-server/deps/amqp_client/src/uri_parser.erl similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/src/uri_parser.erl rename to rabbitmq-server/deps/amqp_client/src/uri_parser.erl diff --git a/rabbitmq-server/deps/amqp_client/test.mk b/rabbitmq-server/deps/amqp_client/test.mk new file mode 100644 index 0000000..8c5825e --- /dev/null +++ b/rabbitmq-server/deps/amqp_client/test.mk @@ -0,0 +1,173 @@ +# The contents of this file are subject to the Mozilla Public License +# Version 1.1 (the "License"); you may not use this file except in +# compliance with the License. You may obtain a copy of the License at +# http://www.mozilla.org/MPL/ +# +# Software distributed under the License is distributed on an "AS IS" +# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the +# License for the specific language governing rights and limitations +# under the License. +# +# The Original Code is RabbitMQ. +# +# The Initial Developer of the Original Code is GoPivotal, Inc. +# Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +# + +IS_SUCCESS:=egrep -E "(All .+ tests (successful|passed).|Test passed.)" +TESTING_MESSAGE:=-eval "error_logger:tty(false), error_logger:logfile({open, \"$(TMPDIR)/erlang-client-tests.log\"}), io:format(\"~nTesting in progress. Please wait...~n~n\")." 
+ +NODE_NAME := amqp_client +RUN := erl -pa test -sname $(NODE_NAME) + +MKTEMP=$$(mktemp $(TMPDIR)/tmp.XXXXXXXXXX) + +ifdef SSL_CERTS_DIR +SSL := true +ALL_SSL := $(MAKE) --no-print-directory test_ssl +ALL_SSL_COVERAGE := $(MAKE) --no-print-directory test_ssl_coverage +SSL_BROKER_ARGS := -rabbit ssl_listeners [{\\\"0.0.0.0\\\",5671},{\\\"::1\\\",5671}] \ + -rabbit ssl_options [{cacertfile,\\\"$(SSL_CERTS_DIR)/testca/cacert.pem\\\"},{certfile,\\\"$(SSL_CERTS_DIR)/server/cert.pem\\\"},{keyfile,\\\"$(SSL_CERTS_DIR)/server/key.pem\\\"},{verify,verify_peer},{fail_if_no_peer_cert,true}] +SSL_CLIENT_ARGS := -erlang_client_ssl_dir $(SSL_CERTS_DIR) +else +SSL := @echo No SSL_CERTS_DIR defined. && false +ALL_SSL := true +ALL_SSL_COVERAGE := true +SSL_BROKER_ARGS := +SSL_CLIENT_ARGS := +endif + +all_tests: + $(test_verbose) rm -f failed-$@ + -$(verbose) $(MAKE) --no-print-directory test_suites || touch failed-$@ + -$(verbose) $(MAKE) --no-print-directory test_common_package || touch failed-$@ + -$(verbose) $(MAKE) --no-print-directory test_direct || touch failed-$@ + $(verbose) ! rm failed-$@ 2>/dev/null + +test_suites: + $(test_verbose) rm -f failed-$@ + -$(verbose) $(MAKE) --no-print-directory test_network || touch failed-$@ + -$(verbose) $(MAKE) --no-print-directory test_remote_direct || touch failed-$@ + -$(verbose) $(ALL_SSL) || touch failed-$@ + $(verbose) ! rm failed-$@ 2>/dev/null + +test_suites_coverage: + $(test_verbose) rm -f failed-$@ + -$(verbose) $(MAKE) --no-print-directory test_network_coverage || touch failed-$@ + -$(verbose) $(MAKE) --no-print-directory test_direct_coverage || touch failed-$@ + $(ALL_SSL_COVERAGE) || touch failed-$@ + $(verbose) ! 
rm failed-$@ 2>/dev/null + +## Starts a broker, configures users and runs the tests on the same node +run_test_in_broker: + $(verbose) $(MAKE) --no-print-directory start_test_broker_node + $(verbose) $(MAKE) --no-print-directory unboot_broker + $(verbose) rm -f failed-$@ + -$(verbose) TMPFILE=$(MKTEMP) && \ + ( echo "Redirecting output to $$TMPFILE" && \ + $(MAKE) --no-print-directory run-node \ + RABBITMQ_SERVER_START_ARGS="-pa test $(SSL_BROKER_ARGS) \ + -noshell -s rabbit $(RUN_TEST_ARGS) -s init stop" 2>&1 | \ + tee $$TMPFILE && \ + $(IS_SUCCESS) $$TMPFILE ) || touch failed-$@; \ + rm $$TMPFILE + -$(verbose) $(MAKE) --no-print-directory boot_broker || touch failed-$@ + -$(verbose) $(MAKE) --no-print-directory stop_test_broker_node || touch failed-$@ + $(verbose) ! rm failed-$@ 2>/dev/null + +## Starts a broker, configures users and runs the tests from a different node +run_test_detached: start_test_broker_node + $(verbose) rm -f failed-$@ + -$(verbose) TMPFILE=$(MKTEMP) && \ + ( echo "Redirecting output to $$TMPFILE" && \ + MAKE=$(MAKE) \ + ERL_LIBS='$(CURDIR)/$(DIST_DIR):$(DIST_ERL_LIBS)' \ + $(RUN) -noinput $(TESTING_MESSAGE) \ + $(SSL_CLIENT_ARGS) $(RUN_TEST_ARGS) -s init stop 2>&1 | \ + tee $$TMPFILE && \ + $(IS_SUCCESS) $$TMPFILE ) || touch failed-$@; \ + rm $$TMPFILE + -$(verbose) $(MAKE) --no-print-directory stop_test_broker_node || touch failed-$@ + $(verbose) ! rm failed-$@ 2>/dev/null + +## Starts a broker, configures users and runs the tests from a different node +run_test_foreground: start_test_broker_node + $(verbose) rm -f failed-$@ + -$(verbose) MAKE=$(MAKE) $(RUN) -noinput $(TESTING_MESSAGE) \ + $(SSL_CLIENT_ARGS) $(RUN_TEST_ARGS) -s init stop || touch failed-$@ + -$(verbose) $(MAKE) --no-print-directory stop_test_broker_node || touch failed-$@ + $(verbose) ! 
rm failed-$@ 2>/dev/null + +start_test_broker_node: boot_broker + $(exec_verbose) sleep 1 + $(verbose) $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) delete_user test_user_no_perm || : + $(verbose) $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) add_user test_user_no_perm test_user_no_perm + $(verbose) sleep 1 + +stop_test_broker_node: + $(exec_verbose) sleep 1 + -$(verbose) $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) delete_user test_user_no_perm + $(verbose) $(MAKE) --no-print-directory unboot_broker + +boot_broker: virgin-test-tmpdir + $(exec_verbose) $(MAKE) --no-print-directory start-background-node \ + RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS) \ + $(SSL_BROKER_ARGS)" + $(verbose) $(MAKE) --no-print-directory start-rabbit-on-node + +unboot_broker: + $(exec_verbose) $(MAKE) --no-print-directory stop-rabbit-on-node + $(verbose) $(MAKE) --no-print-directory stop-node + +ssl: + $(verbose) $(SSL) + +test_ssl: test-dist ssl + $(test_verbose) $(MAKE) --no-print-directory run_test_detached \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="network_ssl" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test" + +test_network: test-dist + $(test_verbose) $(MAKE) --no-print-directory run_test_detached \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="network" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test" + +test_direct: test-dist + $(test_verbose) $(MAKE) --no-print-directory run_test_in_broker \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test" + +test_remote_direct: test-dist + $(test_verbose) $(MAKE) --no-print-directory run_test_detached \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test" + +test_common_package: test-dist + $(test_verbose) $(MAKE) --no-print-directory run_test_detached \ + RUN="erl -pa test" \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="network" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test" + $(verbose) $(MAKE) --no-print-directory run_test_detached \ + RUN="erl -pa test -sname amqp_client" \ + 
AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test" + +test_ssl_coverage: test-dist ssl + $(test_verbose) $(MAKE) --no-print-directory run_test_detached \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="network_ssl" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage" + +test_network_coverage: test-dist + $(test_verbose) $(MAKE) --no-print-directory run_test_detached \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="network" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage" + +test_remote_direct_coverage: test-dist + $(test_verbose) $(MAKE) --no-print-directory run_test_detached \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage" + +test_direct_coverage: test-dist + $(test_verbose) $(MAKE) --no-print-directory run_test_in_broker \ + AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" \ + RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage" diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/Makefile b/rabbitmq-server/deps/amqp_client/test/Makefile similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/Makefile rename to rabbitmq-server/deps/amqp_client/test/Makefile diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_client_SUITE.erl b/rabbitmq-server/deps/amqp_client/test/amqp_client_SUITE.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_client_SUITE.erl rename to rabbitmq-server/deps/amqp_client/test/amqp_client_SUITE.erl index e0bef04..aaa7f94 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_client_SUITE.erl +++ b/rabbitmq-server/deps/amqp_client/test/amqp_client_SUITE.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(amqp_client_SUITE). 
@@ -51,6 +51,8 @@ large_content_test_() -> ?RUN([]). lifecycle_test_() -> ?RUN([]). direct_no_user_test_() -> ?RUN([]). direct_no_password_test_() -> ?RUN([]). +direct_no_vhost_test_() -> ?RUN([]). +network_no_vhost_test_() -> ?RUN([]). nowait_exchange_declare_test_() -> ?RUN([]). channel_repeat_open_close_test_() -> ?RUN([]). channel_multi_open_close_test_() -> ?RUN([]). diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_dbg.erl b/rabbitmq-server/deps/amqp_client/test/amqp_dbg.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_dbg.erl rename to rabbitmq-server/deps/amqp_client/test/amqp_dbg.erl index cb20555..6bd9b07 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/amqp_dbg.erl +++ b/rabbitmq-server/deps/amqp_client/test/amqp_dbg.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(amqp_dbg). diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/negative_test_util.erl b/rabbitmq-server/deps/amqp_client/test/negative_test_util.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/negative_test_util.erl rename to rabbitmq-server/deps/amqp_client/test/negative_test_util.erl index a4f962c..945ff7d 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/negative_test_util.erl +++ b/rabbitmq-server/deps/amqp_client/test/negative_test_util.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(negative_test_util). 
@@ -207,9 +207,9 @@ invalid_password_test() -> non_existent_vhost_test() -> Params = [{virtual_host, <<"oops">>}], - ?assertMatch({error, access_refused}, test_util:new_connection(Params)). + ?assertMatch({error, not_allowed}, test_util:new_connection(Params)). no_permission_test() -> Params = [{username, <<"test_user_no_perm">>}, {password, <<"test_user_no_perm">>}], - ?assertMatch({error, access_refused}, test_util:new_connection(Params)). + ?assertMatch({error, not_allowed}, test_util:new_connection(Params)). diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/test_util.erl b/rabbitmq-server/deps/amqp_client/test/test_util.erl similarity index 95% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/test_util.erl rename to rabbitmq-server/deps/amqp_client/test/test_util.erl index b674423..949d35c 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test/test_util.erl +++ b/rabbitmq-server/deps/amqp_client/test/test_util.erl @@ -124,6 +124,38 @@ amqp_uri_parse_test() -> port = 100}}, amqp_uri:parse("amqp://user:pass@[::1]:100")), + %% TLS options + {ok, #amqp_params_network{ssl_options = TLSOpts1}} = + amqp_uri:parse("amqps://host/%2f?cacertfile=/path/to/cacertfile.pem"), + ?assertEqual(lists:usort([{cacertfile,"/path/to/cacertfile.pem"}]), + lists:usort(TLSOpts1)), + + {ok, #amqp_params_network{ssl_options = TLSOpts2}} = + amqp_uri:parse("amqps://host/%2f?cacertfile=/path/to/cacertfile.pem" + "&certfile=/path/to/certfile.pem"), + ?assertEqual(lists:usort([{certfile, "/path/to/certfile.pem"}, + {cacertfile,"/path/to/cacertfile.pem"}]), + lists:usort(TLSOpts2)), + + {ok, #amqp_params_network{ssl_options = TLSOpts3}} = + amqp_uri:parse("amqps://host/%2f?verify=verify_peer" + "&fail_if_no_peer_cert=true"), + ?assertEqual(lists:usort([{fail_if_no_peer_cert, true}, + {verify, verify_peer} + ]), lists:usort(TLSOpts3)), + + {ok, #amqp_params_network{ssl_options = TLSOpts4}} = + 
amqp_uri:parse("amqps://host/%2f?cacertfile=/path/to/cacertfile.pem" + "&certfile=/path/to/certfile.pem" + "&password=topsecret" + "&depth=5"), + ?assertEqual(lists:usort([{certfile, "/path/to/certfile.pem"}, + {cacertfile,"/path/to/cacertfile.pem"}, + {password, "topsecret"}, + {depth, 5}]), + lists:usort(TLSOpts4)), + + %% Various failure cases ?assertMatch({error, _}, amqp_uri:parse("http://www.rabbitmq.com")), ?assertMatch({error, _}, amqp_uri:parse("amqp://foo:bar:baz")), @@ -265,6 +297,18 @@ direct_no_password_test() -> amqp_connection:close(Connection), wait_for_death(Connection). +direct_no_vhost_test() -> + {error, not_allowed} = new_connection( + just_direct, [{username, <<"guest">>}, + {virtual_host, <<"/noexist">>}]), + ok. + +network_no_vhost_test() -> + {error, not_allowed} = + new_connection(just_network, [{username, <<"guest">>}, + {virtual_host, <<"/noexist">>}]), + ok. + queue_exchange_binding(Channel, X, Parent, Tag) -> receive nothing -> ok @@ -863,7 +907,9 @@ large_content_test() -> {ok, Channel} = amqp_connection:open_channel(Connection), #'queue.declare_ok'{queue = Q} = amqp_channel:call(Channel, #'queue.declare'{}), - {A1,A2,A3} = now(), random:seed(A1, A2, A3), + random:seed(erlang:phash2([node()]), + time_compat:monotonic_time(), + time_compat:unique_integer()), F = list_to_binary([random:uniform(256)-1 || _ <- lists:seq(1, 1000)]), Payload = list_to_binary([[F || _ <- lists:seq(1, 1000)]]), Publish = #'basic.publish'{exchange = <<>>, routing_key = Q}, @@ -1154,15 +1200,20 @@ make_direct_params(Props) -> virtual_host = Pgv(virtual_host, <<"/">>), node = Pgv(node, node())}. +make_cmd() -> + case os:getenv("MAKE") of + false -> "make"; + Cmd -> Cmd + end. + set_resource_alarm(memory) -> - os:cmd("cd ../rabbitmq-test; make set-resource-alarm SOURCE=memory"); + os:cmd(make_cmd() ++ " set-resource-alarm SOURCE=memory"); set_resource_alarm(disk) -> - os:cmd("cd ../rabbitmq-test; make set-resource-alarm SOURCE=disk"). 
- + os:cmd(make_cmd() ++ " set-resource-alarm SOURCE=disk"). clear_resource_alarm(memory) -> - os:cmd("cd ../rabbitmq-test; make clear-resource-alarm SOURCE=memory"); + os:cmd(make_cmd() ++ " clear-resource-alarm SOURCE=memory"); clear_resource_alarm(disk) -> - os:cmd("cd ../rabbitmq-test; make clear-resource-alarm SOURCE=disk"). + os:cmd(make_cmd() ++ " clear-resource-alarm SOURCE=disk"). fmt(Fmt, Args) -> list_to_binary(rabbit_misc:format(Fmt, Args)). diff --git a/rabbitmq-server/deps/cowboy/AUTHORS b/rabbitmq-server/deps/cowboy/AUTHORS new file mode 100644 index 0000000..1cc04b4 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/AUTHORS @@ -0,0 +1,91 @@ +Cowboy is available thanks to the work of: + +Loïc Hoguin +Magnus Klaar +Ali Sabil +Anthony Ramine +Adam Cammack +Tom Burdick +James Fish +Paul Oliver +Slava Yurin +Vladimir Dronnikov +YAMAMOTO Takashi +Yurii Rashkovskii +Andrew Majorov +Eduardo Gurgel +Egobrain +Josh Toft +Steven Gravell +Alex Kovalev +Andrew Thompson +Hunter Morris +Ivan Lisenkov +Martin Rehfeld +Matthias Endler +Seletskiy Stanislav +Sina Samavati +Tristan Sloughter +0x00F6 +0xAX +Adam Cammmack +Adrian Roe +Alexander Færøy +Alexei Sholik +Alexey Lebedeff +Andre Graf +Andrzej Sliwa +Blake Gentry +Bob Ippolito +Boris Faure +Cameron Bytheway +Cristian Hancila +Daniel White +Danielle Sucher +Dave Peticolas +David Kelly +David N. 
Welton +DeadZen +Dmitry Groshev +Drew +Drew Varner +Eiichi Tsukata +Fred Hebert +Hans Ulrich Niedermann +Ivan Blinkov +Jeremy Ong +Jesper Louis Andersen +Josh Allmann +Josh Marchán +José Valim +Julian Squires +Klaus Trainer +Kuk-Hyun Lee +Mathieu Lecarme +Max Lapshin +Michael Truog +Michiel Hakvoort +Nakai Ryosuke +Ori Bar +Pablo Vieytes +Paulo Oliveira +Peter Ericson +RJ +Radosław Szymczyszyn +Richard Ramsden +Roberto Ostinelli +Sergey Prokhorov +Sergey Rublev +Sergey Urbanovich +Seven Du +Thomas Nordström +Tim Dysinger +Tomas Morstein +Unix1 +alisdair sullivan +dbmercer +derdesign +mocchira +pmyarchon +rambocoder +serge diff --git a/rabbitmq-server/deps/cowboy/CHANGELOG.md b/rabbitmq-server/deps/cowboy/CHANGELOG.md new file mode 100644 index 0000000..edeb748 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/CHANGELOG.md @@ -0,0 +1,910 @@ +CHANGELOG +========= + +1.0.3 +----- + + * Add CircleCI for automated testing across all supported Erlang versions + + * Only send the Connection header when necessary + + * Send Connection: keep-alive when HTTP/1.0 connections are kept open + + * Fix a typo in a return value of cowboy_http:asctime_date + + * Fix Dialyzer warnings in cowboy_clock + +1.0.2 +----- + + * Fix cowboy_clock inbox overflow when system clock changes (but see 1.0.3) + + * Fix stacktrace shown on REST handler crashes + + * Reply with 400 on if-match and if-not-match parsing crash + + * Improve static_world example by adding index.html file + +1.0.1 +----- + + * Add missing cowboy_req:part/2 export + + * Fix an issue building dependencies on FreeBSD + + * Improve support for requests with an absolute URI + + * Small fixes to the documentation + +1.0.0 +----- + + * Drop R15 support + + * Update erlang.mk, Ranch and Cowlib to 1.0.0 + + * Complete the user guide and simplify the Getting started chapter + + * Document the HTTP status codes Cowboy may send in the manual + + * Provide installable man pages (see README) + + * Support ad-hoc keep-alive for HTTP/1.0 
clients + + * Fix SPDY parsing error when many frames were received in one packet + + * Reply with 400 instead of 422 in cowboy_rest + + * Reply with 400 instead of 500 on header parsing crash + + * Remove deprecated body reading interface (see 0.10.0 notes) + +0.10.0 +------ + + * Update Ranch to 0.10 and Cowlib to 0.6.2 + + * Update the body reading API to allow controlling rate of transfer + + The lack of this feature was causing various timeout issues + in some environments. + + The body/2 function now takes a Req and a list of options. The older + interface can still be used until Cowboy 1.0. + + The body_qs/2, part/2 and part_body/2 also accept this list of + options, and pass it down to the body/2 call. The default options + vary between the different functions. + + The return value of the function has changed. Older code should work + without modification but you should definitely still test it. + + All functions appropriately decode transfer and content encoding. + There is no need to have a special case for that anymore. + + The body/1,2 function supports streaming, with the same interface + as the part_body/1,2 function. + + * Deprecate the cowboy_req:init_stream, stream_body and skip_body functions + + They will be removed in Cowboy 1.0. + + * Add support for multipart + + The cowboy_req:part/1,2 and cowboy_req:part_body/1,2 can be + used for reading a multipart body. + + Documentation has been added. + + The old undocumented multipart functions were removed. + + * Allow the onresponse hook to override only status and headers + + Previously it wasn't possible to override them without also + overriding the body. The new return value is currently marked + as experimental. 
+ + * Make loop handlers work with SPDY + + * Fix a race condition with loop handlers and keepalive requests + + * Fix parsing of accept-language header + + * Fix parsing of authorization header with empty passwords + + * Fix multiline headers handling + + * Various optimizations + + All code that is moved to cowlib is optimized at the same time + and benchmarks get added for all functions. + + The parsing of connection, content-length and transfer-encoding + has been optimized. + + Chunked transfer decoding has been optimized. + + * Enable +warn_missing_spec by default and fix specs + + * Remove the non-documented cowboy_client module; use gun instead + + * Numerous documentation updates and tweaks + + The guide now has a REST principles chapter and revised + and completed Websocket chapters, alongside a new multipart chapter. + + * Add a multipart file upload example + + * Test suites are being reworked and greatly improved + * Test Cowboy across all releases from R15B01 to 17.0, plus maint and master + * The Autobahn test suite is now always ran (as long as it's installed) + * Expected error reports are now silenced (but still logged) + * Applications are now only started once from a ct hook + +0.9.0 +----- + + * Update Ranch to 0.9.0 + + * SPDY is no longer experimental and is documented + + The SPDY development has been sponsored by the LeoFS project. + + * Review, improve and document cowboy_static + * Much simplified configuration + * Etag generation is now enabled by default + * Web mimetypes are now detected by default + * Optionally a huge list of mimetypes can also be used + * It not try to magically find the priv directory anymore, use ERL_LIBS + + * Remove the pretty printing of errors + + Cowboy will no longer print errors, it will instead let the process + crash properly, so that links can work with Cowboy. Ranch will catch + errors and print a one-liner containing all the error information + instead. 
+ + * Trigger a human readable error when routes lack the starting slash + + * Add websocket_compress metadata + + * Fix parsing of hosts given as IPv6 addresses + + * Fix the decoding of chunked bodies + + * Fix handling of close, ping and pong Websocket replies + + * Fix the x-webkit-deflate-frame Websocket extension + + * Fix PUT behavior in cowboy_rest when creating a resource at the request URL + + * Fix warnings with the reltool GUI + + * Start moving code in a new dependency, cowlib + + The code moved there is mostly parsing code and utility functions. + Most of it was in the undocumented cowboy_http module before. + + * Optimize cookie date building and query string building/parsing + + * Great number of improvements and additions to the user guide + + * Convert all examples to releases using the erlang.mk+relx combo + + Some examples have also been fixed or slightly improved. + The elixir example is now on a separate repository maintained + independently. More examples in this language exist in that + other repository. + +0.8.6 +----- + + * Make sure Cowboy compiles on R16B01 + + * Update Ranch to 0.8.4 + + * Add experimental support for the x-webkit-deflate-frame Websocket extension + + This allows Cowboy to handle compressed Websocket frames, + lowering the amount of data that needs to be sent over the + socket. + + The extension will only be used if compression was enabled + using the `compress` protocol option. + + * Add experimental SPDY support + + SPDY is a new protocol implemented by most browsers. It is + the basis for what will become HTTP/2.0. + + To use SPDY, you need to call `start_spdy` where you would + have used `start_https` before. + + This protocol is still incomplete. It cannot accept request + bodies yet, making most methods other than GET and HEAD + not too useful at this point. 
+ + * Allow an empty method list in allowed_methods + + * The charset parameter of content-types is now always lowercase + + * Don't overwrite the stacktrace when a REST handler crashes + + * Don't crash when the Cookie header is empty + + * Don't crash on invalid Accept-Encoding header when replying + +0.8.5 +----- + + * Add the Cowboy Function Reference + + Everything documented in the function reference is the API + that will make it to Cowboy 1.0. + + * Use erlang.mk + + The project is of course still compatible with rebar + and can be used as a dependency just fine. + + * Update Ranch to 0.8.3 + + * Remove cowboy_req:fragment/1 + + No well-written client is sending the fragment with the URL. + + * Add cowboy_req:set_resp_body_fun(chunked, Fun, Req) + + * Improve various typespecs + + * Change the return value of cowboy_req:version/1 + + We now have 'HTTP/1.1' instead of {1, 1} and 'HTTP/1.0' + instead of {1, 0}. + + * Change the return value of REST accept callbacks + + The Path return value becomes {true, Path}. + + * Change the return value of REST charsets_provided/2 + + It was incorrectly expecting a list of tuples instead of + a list of charsets. + + * Move various types to the cowboy module + * cowboy_http:version() to cowboy:http_version() + * cowboy_http:headers() to cowboy:http_headers() + * cowboy_http:status() to cowboy:http_status() + * cowboy_protocol:onrequest_fun() to cowboy:onrequest_fun() + * cowboy_protocol:onresponse_fun() to cowboy:onresponse_fun() + + * Add type cowboy_protocol:opts() + + * Fix a REST bug with the OPTIONS method + + * Fix a REST bug where iso-8859-1 would be incoditionally selected + +0.8.4 +----- + + * Cookie names are now back to being case sensitive + + This should be more in line with what browsers do and what + users would expect. 
+ + * REST is no longer experimental and is documented + + * REST behavior fixed when used with the POST method + + Removes process_post, post_is_create, create_path, created_path + callbacks. It is up to the resource accept callback to decide + what to do when the POST method is used. Depending on the return + value Cowboy will determine if the resource was created or not. + + * Removes the put_path meta value in REST + + * Fix an issue in REST with the PATCH method + + Content-types were not normalized as expected, preventing the + use of the binary form for content-types. + + * Add default operations for the OPTIONS method in REST + + The default will be to set the Allow header in the response + based on the return value from allowed_methods. + + * Add default content_types_provided "text/html" maps to to_html + + This allows non-HEAD/GET methods to work without defining + the callback explicitly. + + * Reject invalid content-types explicitly in REST + + * Don't accept TRACE or CONNECT methods by default in REST + + * Remove cowboy_req:peer_addr/1 + + Because each server's proxy situation differs, it is better + that this function is implemented by the application directly. + + The X-Forwarded-For header can now be parsed using + cowboy_req:parse_header/2. + + * Switch the arguments to cowboy_req:stream_body/2 + + They were in the wrong order compared to the rest of the module. + + * Add parser for the Range header + + * Do not crash if connection times out while sending a file using sendfile + + * Ensure we can fetch the body in the info/3 function of loop handlers + + * Update Ranch to 0.8.1 + + * Reorganize and optimize the test suites + +0.8.3 +----- + + * Remove init_stream/5, add stream_body/2 + + It's better to allow configuring the streamed chunk size on + a per chunk basis. Also easier to use. + + * Update Ranch to 0.8.0 + + Much faster. Also improved stability. 
+ +0.8.2 +----- + + * Add error_hook and ssl_hello_world example + + * Greatly improve the performance of body reading operations + + The streamed chunk size is now configurable through the new + function cowboy_req:init_stream/5. + + * Add cowboy_req:body/2 and cowboy_req:body_qs/2 + + These functions take an additional argument indicating the + maximum size of the body. They will return {error, badlength} + if the size is too large, or {error, chunked} if the body + was sent using the chunked Transfer-Encoding and its size + cannot be determined. + + The function body/1 is now an alias to body/2 with a maximum + body size of 8MB. Likewise, the function body_qs/1 is an alias + of body_qs/2 with a maximum body size of 16KB. + + * Properly handle explicit identity Transfer-Encoding in body_length/1 + + * Small but noticeable performance improvement in the critical path + + We stopped using binary:match/2 in favor of custom functions. + This makes Cowboy 0.5ms faster per request. + + * Prevent loop handlers from awakening after sending a response + + * Optimize cowboy_static initialization code + + * Make path checks in cowboy_static cross-platform + + * Allow '*' for REST content types parameters in content_types_provided + + * Fix cowboy_router types + + * Update Ranch to 0.6.2; adds support for two new SSL options + + * Improve documentation + +0.8.1 +----- + + * Add eventsource, web_server examples; improve rest_pastebin example + + * Add cowboy:set_env/3 to more conveniently update the dispatch list + + * Add cowboy_sub_protocol behaviour + + * Fix cowboy_req:has_body/1 when Content-Length == 0 + + * Fix passing of state to websocket_terminate/3 on server close + + * Fix compilation with +native + + * Compile with more warnings enabled by default; fix warnings + + * Set the socket in passive mode after the loop handler terminates + + * Improve typespecs + +0.8.0 +----- + + * This release drops R14 compatibility + + Behaviours now use the -callback attribute 
which is supported only + since R15B. + + * Add a user guide + + * Add or update many examples + + Add basic_auth, compress_response, cookie, elixir_hello_world, + markdown_middleware, rest_pastebin, rest_stream_response + and websocket examples. + + Rename the static example to static_world for clarity. + + * Add CONTRIBUTING.md file + + * Use Ranch 0.6.1 for connection handling + + To start listeners you can now use cowboy:start_http/4 for HTTP, + and cowboy:start_https/4 for HTTPS. The proper transport and + protocol modules will be used. + + * Add protection against slowloris vulnerability + + This protection is always enabled and has no impact on the performance + of the system. + + * Add a better routing syntax + + * If a binding is used twice in routing, values must now be identical + + * Add support for a configurable chain of middlewares + + Routing and handling are now two separate middlewares that can be + replaced as needed. + + * Fix application dependencies + + The crypto application must be started before Cowboy. + + The inets application is no longer needed. A few functions from + that application were used by mistake in the REST code. + + * Shorten the name of many modules + * cowboy_http_protocol becomes cowboy_protocol + * cowboy_http_req becomes cowboy_req + * cowboy_http_rest becomes cowboy_rest + * cowboy_http_static becomes cowboy_static + * cowboy_http_websocket becomes cowboy_websocket + + * Introduce the cowboy_req:req() opaque type + + The include/http.hrl file was removed. Users are expected to use + the cowboy_req API to access or modify the Req object. + + This required a lot of changes so cleanup and optimizations were + performed where possible. 
+ + * Add many cowboy_req functions + * cowboy_req:delete_resp_header/2 deletes a previously set resp header + * cowboy_req:set_meta/3 sets metadata in the Req object + * cowboy_req:to_list/1 converts the Req object to a list of key/values + * cowboy_req:fragment/1 returns the request URL fragment + * cowboy_req:host_url/1 returns the request URL without the path or qs + * cowboy_req:url/1 returns the full request URL + * cowboy_req:set_resp_body_fun/2 for body streaming with no known length + + * Improve the body streaming interface in cowboy_req + + The function now receives the Transport and Socket directly as arguments. + + * Rename or drop many cowboy_req functions + * cowboy_req:raw_host/1 becomes cowboy_req:host/1, old function dropped + * cowboy_req:raw_path/1 becomes cowboy_req:path/1, old function dropped + * cowboy_req:raw_qs/1 becomes cowboy_req:qs/1 + * Remove cowboy_req:body/2 + * Remove cowboy_req:transport/1 + + * Change the signature of many cowboy_req functions + * parse_header now returns {ok, any(), Req} instead of {any(), Req} + * body_qs now returns {ok, QsVals, Req} instead of {QsVals, Req} + * multipart_data now returns {headers, Headers, Req} instead of + {{headers, Headers}, Req} and {body, Body, Req} instead of + {{body, Body}, Req} + * set_resp_* functions now return Req instead of {ok, Req} + * has_body now returns boolean() + + * Rewrote cookie code + + In short we now do the same thing as PHP when setting cookies. This + allows us to be fairly confident that our code will work on the vast + majority of browsers. + + * Fix consistency issues caused by erlang:decode_packet/3 + * The method is now always a case sensitive binary string + * Note that standard method names are uppercase (e.g. 
<<"GET">>) + * Header names are now always lowercase binary string + + * The max_line_length cowboy_protocol option was replaced by 3 new options: + * max_request_line_length, defaults to 4096 bytes + * max_header_name_length, defaults to 64 bytes + * max_header_value_length, defaults to 4096 bytes + + * Add max_headers option, limiting the number of headers; defaults to 100 + + * The max_keepalive option now defaults to 100 instead of infinity + + * Change terminate/2 to terminate/3 in the HTTP handler interface + + * Enhance the loop handler API + * Connection close is now better detected + * Fix an internal message leak + + * Enhance the Websocket API + * Change a websocket error from {error, protocol} to {error, badframe} + * Allow websocket handlers to reply more than one frame + * Check for errors when calling Transport:send/2 to avoid crashes + * Add close, {close, Payload}, {close, StatusCode, Payload}, + ping, pong frame types for replies + * Ensure websocket_terminate is always called + * Improve timeout handling + * Remove support for the old hixie76 protocol + * Add parsing support for Sec-Websocket-Protocol + * Check for UTF-8 correctness of text frames + * Perform unmasking and UTF-8 validation on the fly + * Reject clients that send unmasked frames + * Add cowboy_websocket:close_code/0 type + + * Enhance the REST API + * Fix charset handling + * Add PATCH support + * Add created_path callback, used if create_path was not defined + * Make sure rest_terminate is always called + + * Improved HTTP standard compatibility + * Revised status code used in responses + * Implement authorization header parsing + * Add opt-in automatic response body compression + + * Improve lager compatibility + + We format errors in a special way so that lager can recognize Cowboy + errors and put them on a single line. 
+ + * Remove the urldecode cowboy_protocol option + + * Add cowboy_protocol:onrequest_fun/0 and :onresponse_fun/0 types + + * Add the body data to onresponse_fun/0 callback + + * Avoid a duplicate HTTP reply in cowboy_websocket:upgrade_error/1 + + * Fix use of the Vary header, was named Variances in the previous code + + * Improve returned status code for HTTP and REST + + * Fix charsets_provided return value + + * Allow passing {M, F} for the mimetype function to cowboy_static + + * Can now upgrade protocols with {upgrade, protocol, P, Req, Opts} + + * Cowboy now only expects universal time, never local time + + * Do not try skipping the body if the connection is to be closed + + * Add cowboy_bstr:to_upper/1, cowboy_bstr:capitalize_token/1 + + * Many, many optimizations for the most critical code path + +0.6.1 +----- + + * Add hello_world, rest_hello_world, chunked_hello_world, + echo_get, echo_post and static examples. + + * Add support for the "Expect: 100-continue" header. + + * Keep the original 'Host' header value instead of modifying it. + + * Fix use of parsed headers cache. + + * REST: fix the matching of charsets. + + * REST: allow <<"type/subtype">> format for content_types_accepted. + + * Improve typespecs. + +0.6.0 +----- + + * Add multipart support + + * Add chunked transfer decoding support + + Done by reworking the body reading API. Now all the body + reading goes through the cowboy_http_req:stream_body/1 + function. This function takes care of handling both the + Transfer-Encoding and the Content-Encoding, returning + properly decoded data ready for consumption. + + * Add fragmented websocket messages support + + Properly tested by the addition of the Autobahn websocket + test suite to our toolbox. All tests pass except a few + related to UTF-8 handling, as Cowboy does no checks on that + end at this point. 
+ + * Add 'onrequest' and 'onresponse' hooks + + The first can be used for all the special cases you may have + that can't be dealt with otherwise. It's also pretty good for + writing access logs or rewriting URLs. + + The second can be used for logging errors or replacing error + pages, amongst others. + + * Add cowboy:get_protocol_options/1 and cowboy:set_protocol_options/2 + + These functions allow for retrieving a listener's protocol options, + and for modifying them while the listener is running. This is + most useful to upgrade the dispatch list. The upgrade applies + to all the future connections. + + * Add the sockname/1 function to TCP and SSL transports + + * Improve SSL transport support + + Add support for specifying the ciphers. Add CA support. Make + specifying the password optional. + + * Add new HTTP status codes from RFC 6585 + + * Add a 'file' option to cowboy_http_static + + This allows for mapping /folder/ paths to a /folder/index.html file. + + * Add the '*' catch all Content-Type for REST + + * Add {halt, Req, State} as a possible return value for REST + + * Add absolute URI support for requests + + * Add cowboy_http:x_www_form_urlencoded/2 + + * Various REST bug fixes + + * Do not send chunked replies for HTTP/1.0 connections + + * Fix a DST bug in the cookies code + + * Fix a bug with setting cookie values containing slashes + + * Fix a small timer leak when using loop/websocket timeouts + + * Make charset and media type parsing more relaxed + + This is to accommodate some widely used broken clients. + + * Make error messages more readable + + * Fix and improve type specifications + + * Fix a bug preventing documentation from being generated + + * Small improvements to the documentation + + * Rework the HTTP test suite + + The suite now uses an integrated Cowboy HTTP client. The client + is currently experimental and shouldn't be used. + + * Add many many tests. 
+ +0.4.0 +----- + + * Set the cowboy_listener process priority to high + + As it is the central process used by all incoming requests + we need to set its priority to high to avoid timeouts that + would happen otherwise when reaching a huge number of + concurrent requests. + + * Add cowboy:child_spec/6 for embedding in other applications + + * Add cowboy_http_rest, an experimental REST protocol support + + Based on the Webmachine diagram and documentation. It is a + new implementation, not a port, therefore a few changes have + been made. However all the callback names are the same and + should behave similarly to Webmachine. + + There is currently no documentation other than the Webmachine + resource documentation and the comments found in cowboy_http_rest, + which itself should be fairly easy to read and understand. + + * Add cowboy_http_static, an experimental static file handler + + Makes use of the aforementioned REST protocol support to + deliver files with proper content type and cache headers. + + Note that this uses the new file:sendfile support when + appropriate, which currently requires the VM to be started + with the +A option defined, else errors may randomly appear. + + * Add cowboy_bstr module for binary strings related functions + + * Add cowboy_http module for HTTP parsing functions + + This module so far contains various functions for HTTP header + parsing along with URL encoding and decoding. + + * Remove quoted from the default dependencies + + This should make Cowboy much easier to compile and use by default. + It is of course still possible to use quoted as your URL decoding + library in Cowboy thanks to the newly added urldecode option. + + * Fix supervisor spec for non dynamic modules to allow upgrades to complete + + * Add cowboy:accept_ack/1 for a cleaner handling of the shoot message + + Before, when the listener accepted a connection, the newly created + process was waiting for a message containing the atom 'shoot' before + proceeding. 
This has been replaced by the cowboy:accept_ack/1 function. + + This function should be used where 'shoot' was received because the + contents of the message have changed (and could change again in the + distant future). + + * Update binary parsing expressions to avoid hype crashes + + More specifically, /bits was replaced by /binary. + + * Rename the type cowboy_dispatcher:path_tokens/0 to tokens/0 + + * Remove the cowboy_clock:date/0, time/0 and datetime/0 types + + The calendar module exports those same types properly since R14B04. + + * Add cacertfile configuration option to cowboy_ssl_transport + + * Add cowboy_protocol behaviour + + * Remove -Wbehaviours dialyzer option unavailable in R15B + + * Many tests and specs improvements + +### cowboy_http_req + + * Fix a crash when reading the request body + + * Add parse_header/2 and parse_header/3 + + The following headers can now be semantically parsed: Connection, Accept, + Accept-Charset, Accept-Encoding, Accept-Language, Content-Length, + Content-Type, If-Match, If-None-Match, If-Modified-Since, + If-Unmodified-Since, Upgrade + + * Add set_resp_header/3, set_resp_cookie/4 and set_resp_body/2 + + These functions allow handlers to set response headers and body + without having to reply directly. + + * Add set_resp_body_fun/3 + + This function allows handlers to stream the body of the response + using the given fun. The size of the response must be known beforehand. + + * Add transport/1 to obtain the transport and socket for the request + + This allows handlers to have low-level socket access in those cases + where they do need it, like when streaming a response body with + set_resp_body_fun/3. + + * Add peer_addr/1 + + This function tries to guess the real peer IP based on the HTTP + headers received. + + * Add meta/2 and meta/3 to save useful protocol information + + Currently used to save the Websocket protocol version currently used, + and to save request information in the REST protocol handler. 
+ + * Add reply/2 and reply/3 aliases to reply/4 + + * Add upgrade_reply/3 for protocol upgrades + +### cowboy_http_protocol + + * Add the {urldecode, fun urldecode/2} option + + Added when quoted was removed from the default build. Can be used to + tell Cowboy to use quoted or any other URL decoding routine. + + * Add the max_keepalive option + + * Add the max_line_length option + + * Allow HTTP handlers to stop during init/3 + + To do so they can return {shutdown, Req, State}. + + * Add loops support in HTTP handlers for proper long-polling support + + A loop can be entered by returning either of {loop, Req, State}, + {loop, Req, State, hibernate}, {loop, Req, State, Timeout} or + {loop, Req, State, Timeout, hibernate} from init/3. + + Loops are useful when we cannot reply immediately and instead + are waiting for an Erlang message to be able to complete the request, + as would typically be done for long-polling. + + Loop support in the protocol means that timeouts and hibernating + are well tested and handled so you can use those options without + worrying. It is recommended to set the timeout option. + + When a loop is started, handle/2 will never be called so it does + not need to be defined. When the request process receives an Erlang + message, it will call the info/3 function with the message as the + first argument. + + Like in OTP, you do need to set timeout and hibernate again when + returning from info/3 to enable them until the next call. + + * Fix the sending of 500 errors when handlers crash + + Now we send an error response when no response has been sent, + and do nothing more than close the connection if anything + did get sent. 
+ + * Fix a crash when the server is sent HTTP responses + + * Fix HTTP timeouts handling when the Request-Line wasn't received + + * Fix the handling of the max number of empty lines between requests + + * Fix the handling of HEAD requests + + * Fix HTTP/1.0 Host header handling + + * Reply status 400 if we receive an unexpected value or error for headers + + * Properly close when the application sends "Connection: close" header + + * Close HTTP connections on all errors + + * Improve the error message for HTTP handlers + +### cowboy_http_websocket + + * Add websocket support for all versions up to RFC 6455 + + Support isn't perfect yet according to the specifications, but + is working against all currently known client implementations. + + * Allow websocket_init/3 to return with the hibernate option set + + * Add {shutdown, Req} return value to websocket_init/3 to fail an upgrade + + * Fix websocket timeout handling + + * Fix error messages: wrong callback name was reported on error + + * Fix byte-by-byte websocket handling + + * Fix an issue when using hixie-76 with certain proxies + + * Fix a crash in the hixie-76 handshake + + * Fix the handshake when SSL is used on port 443 + + * Fix a crash in the handshake when cowboy_http_req:compact/1 is used + + * Fix handshake when a query string is present + + * Fix a crash when the Upgrade header contains more than one token + +0.2.0 +----- + + * Initial release. diff --git a/rabbitmq-server/deps/cowboy/CONTRIBUTING.md b/rabbitmq-server/deps/cowboy/CONTRIBUTING.md new file mode 100644 index 0000000..7bc005b --- /dev/null +++ b/rabbitmq-server/deps/cowboy/CONTRIBUTING.md @@ -0,0 +1,173 @@ +Contributing +============ + +Introduction +------------ + +This document describes the usages and rules to follow when contributing +to this project. + +It uses the uppercase keywords SHOULD for optional but highly recommended +conditions and MUST for required conditions. + +`git` is a distributed source code versioning system. 
This document refers +to three different repositories hosting the source code of the project. +`Your local copy` refers to the copy of the repository that you have on +your computer. The remote repository `origin` refers to your fork of the +project's repository that you can find in your GitHub account. The remote +repository `upstream` refers to the official repository for this project. + +Following this document will ensure prompt merging of your work in the +`master` branch of the project. + +Reporting bugs +-------------- + +Upon identifying a bug or a DoS vulnerability, you SHOULD submit a ticket, +regardless of your plans for fixing it. If you plan to fix the bug, you +SHOULD discuss your plans to avoid having your work rejected. + +Upon identifying a security vulnerability in Erlang/OTP that leaves Cowboy +vulnerable to attack, you SHOULD consult privately with the Erlang/OTP team +to get the issue resolved. + +Upon identifying a security vulnerability in Cowboy's `cowboy_static` module, +you SHOULD submit a ticket, regardless of your plans for fixing it. Please +ensure that all necessary details to reproduce are listed. You then SHOULD +inform users on the mailing list about the issue, advising that they use +another means for sending static files until the issue is resolved. + +Upon identifying a security vulnerability in any other part of Cowboy, you +SHOULD contact us directly by email. Please ensure that all necessary details +to reproduce are listed. + +Before implementing a new feature, you SHOULD submit a ticket for discussion +on your plans. The feature might have been rejected already, or the +implementation might already be decided. + +Cloning +------- + +You MUST fork the project's repository to your GitHub account by clicking +on the `Fork` button. + +Then, from your fork's page, copy the `Git Read-Only` URL to your clipboard. 
+You MUST perform the following commands in the folder you choose, replacing +`$URL` by the URL you just copied, `$UPSTREAM_URL` by the `Git Read-Only` +project of the official repository, and `$PROJECT` by the name of this project. + +``` bash +$ git clone "$URL" +$ cd $PROJECT +$ git remote add upstream $UPSTREAM_URL +``` + +Branching +--------- + +Before starting working on the code, you MUST update to `upstream`. The +project is always evolving, and as such you SHOULD always strive to keep +up to date when submitting patches to make sure they can be merged without +conflicts. + +To update the current branch to `upstream`, you can use the following commands. + +``` bash +$ git fetch upstream +$ git rebase upstream/master +``` + +It may ask you to stash your changes, in which case you stash with: + +``` bash +$ git stash +``` + +And put your changes back in with: + +``` bash +$ git stash pop +``` + +You SHOULD use these commands both before working on your patch and before +submitting the pull request. If conflicts arise it is your responsibility +to deal with them. + +You MUST create a new branch for your work. First, ensure you are on `master`. +You MUST update `master` to `upstream` before doing anything. Then create a +new branch `$BRANCH` and switch to it. + +``` bash +$ git checkout -b $BRANCH +``` + +You MUST use an insightful branch name. 
+ +If you later need to switch back to an existing branch `$BRANCH`, you can use: + +``` bash +$ git checkout $BRANCH +``` + +Source editing +-------------- + +The following rules MUST be followed: + * Indentation uses horizontal tabs (1 tab = 4 columns) + * Do NOT align code; only indentation is allowed + * Lines MUST NOT span more than 80 columns + +The following rules SHOULD be followed: + * Write small functions whenever possible + * Avoid having too many clauses containing clauses containing clauses + +Committing +---------- + +You MUST ensure that all commits pass all tests and do not have extra +Dialyzer warnings. + +Running tests is fairly straightforward. Note that you need at least +Erlang/OTP R16B01 for the SSL tests to run. + +``` bash +make tests +``` + +Running Dialyzer requires some initial setup. You need to build the PLT +file that Dialyzer will use for its analysis. This is a one-time operation. +Dialyzer will take care of updating that file when needed. + +``` bash +make build-plt +``` + +Once that is done, you can run Dialyzer. + +``` bash +make dialyze +``` + +You MUST put all the related work in a single commit. Fixing a bug is one +commit, adding a feature is one commit, adding two features is two commits. + +You MUST write a proper commit title and message. The commit title MUST be +at most 72 characters; it is the first line of the commit text. The second +line of the commit text MUST be left blank. The third line and beyond is the +commit message. You SHOULD write a commit message. If you do, you MUST make +all lines smaller than 80 characters. You SHOULD explain what the commit +does, what references you used and any other information that helps +understanding your work. + +Submitting the pull request +--------------------------- + +You MUST push your branch `$BRANCH` to GitHub, using the following command: + +``` bash +$ git push origin $BRANCH +``` + +You MUST then submit the pull request by using the GitHub interface. 
+You SHOULD provide an explanatory message and refer to any previous ticket +related to this patch. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/LICENSE b/rabbitmq-server/deps/cowboy/LICENSE similarity index 92% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/LICENSE rename to rabbitmq-server/deps/cowboy/LICENSE index 7de99bb..e7435f8 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/LICENSE +++ b/rabbitmq-server/deps/cowboy/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2011, Loïc Hoguin +Copyright (c) 2011-2014, Loïc Hoguin Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above diff --git a/rabbitmq-server/deps/cowboy/Makefile b/rabbitmq-server/deps/cowboy/Makefile new file mode 100644 index 0000000..a186286 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/Makefile @@ -0,0 +1,75 @@ +# See LICENSE for licensing information. + +PROJECT = cowboy + +# Options. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_all +warn_export_vars \ + +warn_shadow_vars +warn_obsolete_guard +warn_missing_spec +COMPILE_FIRST = cowboy_middleware cowboy_sub_protocol +CT_OPTS += -pa test -ct_hooks cowboy_ct_hook [] +PLT_APPS = crypto public_key ssl +CI_OTP = OTP_R16B01 OTP_R16B02 OTP_R16B03-1 OTP-17.0.2 OTP-17.1.2 OTP-17.2.2 OTP-17.3.4 OTP-17.4.1 OTP-17.5.6.2 OTP-18.0.2 + +# Dependencies. + +DEPS = cowlib ranch +TEST_DEPS = ct_helper gun +dep_ct_helper = git https://github.com/extend/ct_helper.git master +dep_gun = git https://github.com/ninenines/gun b85c1f726ca49ac0e3abdcf717317cb95b06207d + +# Standard targets. + +include erlang.mk + +# Documentation. 
+ +dep_ezdoc = git https://github.com/ninenines/ezdoc master +$(eval $(call dep_target,ezdoc)) + +build-doc-deps: $(DEPS_DIR)/ezdoc + $(MAKE) -C $(DEPS_DIR)/ezdoc + +define ezdoc_script +io:format("Building manual~n"), +[begin + AST = ezdoc:parse_file(F), + BF = filename:rootname(filename:basename(F)), + io:format(" ~s~n", [BF]), + file:write_file("doc/markdown/manual/" ++ BF ++ ".md", ezdoc_markdown:export(AST)), + case BF of + "cowboy" ++ _ when BF =/= "cowboy_app" -> + file:write_file("doc/man3/" ++ BF ++ ".3", ezdoc_man:export(3, AST)); + _ when BF =/= "index" -> + file:write_file("doc/man7/" ++ BF ++ ".7", ezdoc_man:export(7, AST)); + _ -> + ok + end +end || F <- filelib:wildcard("doc/src/manual/*.ezdoc")], +io:format("Building guide~n"), +[begin + AST = ezdoc:parse_file(F), + BF = filename:rootname(filename:basename(F)), + io:format(" ~s~n", [BF]), + file:write_file("doc/markdown/guide/" ++ BF ++ ".md", ezdoc_markdown:export(AST)) +end || F <- filelib:wildcard("doc/src/guide/*.ezdoc")], +io:format("Done.~n"), +init:stop(). +endef +export ezdoc_script + +docs:: clean-docs build-doc-deps + @mkdir -p doc/man3 doc/man7 doc/markdown/guide doc/markdown/manual + $(gen_verbose) erl -noinput -pa ebin deps/ezdoc/ebin -eval "$$ezdoc_script" + @gzip doc/man3/*.3 doc/man7/*.7 + @cp doc/src/guide/*.png doc/markdown/guide + +clean-docs: + $(gen_verbose) rm -rf doc/man3 doc/man7 doc/markdown + +MAN_INSTALL_PATH ?= /usr/local/share/man + +install-docs: + mkdir -p $(MAN_INSTALL_PATH)/man3/ $(MAN_INSTALL_PATH)/man7/ + install -g 0 -o 0 -m 0644 doc/man3/*.gz $(MAN_INSTALL_PATH)/man3/ + install -g 0 -o 0 -m 0644 doc/man7/*.gz $(MAN_INSTALL_PATH)/man7/ diff --git a/rabbitmq-server/deps/cowboy/README.md b/rabbitmq-server/deps/cowboy/README.md new file mode 100644 index 0000000..2919e63 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/README.md @@ -0,0 +1,50 @@ +Cowboy +====== + +Cowboy is a small, fast and modular HTTP server written in Erlang. 
+ +Goals +----- + +Cowboy aims to provide a **complete** HTTP stack in a **small** code base. +It is optimized for **low latency** and **low memory usage**, in part +because it uses **binary strings**. + +Cowboy provides **routing** capabilities, selectively dispatching requests +to handlers written in Erlang. + +Because it uses Ranch for managing connections, Cowboy can easily be +**embedded** in any other application. + +No parameterized module. No process dictionary. **Clean** Erlang code. + +Sponsors +-------- + +The SPDY implementation was sponsored by +[LeoFS Cloud Storage](http://www.leofs.org). + +The project is currently sponsored by +[Kato.im](https://kato.im). + +Online documentation +-------------------- + + * [User guide](http://ninenines.eu/docs/en/cowboy/HEAD/guide) + * [Function reference](http://ninenines.eu/docs/en/cowboy/HEAD/manual) + +Offline documentation +--------------------- + + * While still online, run `make docs` + * Function reference man pages available in `doc/man3/` and `doc/man7/` + * Run `make install-docs` to install man pages on your system + * Full documentation in Markdown available in `doc/markdown/` + * Examples available in `examples/` + +Getting help +------------ + + * Official IRC Channel: #ninenines on irc.freenode.net + * [Mailing Lists](http://lists.ninenines.eu) + * [Commercial Support](http://ninenines.eu/support) diff --git a/rabbitmq-server/deps/cowboy/ROADMAP.md b/rabbitmq-server/deps/cowboy/ROADMAP.md new file mode 100644 index 0000000..db94461 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/ROADMAP.md @@ -0,0 +1,127 @@ +ROADMAP +======= + +This document explains in as much details as possible the +list of planned changes and work to be done on the Cowboy +server. It is intended to be exhaustive but some elements +might still be missing. + +2.0 (R17 and R18) +----------------- + +The main features that will be added to Cowboy 2.0 are support +for HTTP/2.0 and Websocket permessage deflate compression. 
+ +A complete analysis of the httpbis set of specifications +will be performed and extensive tests will be written to +ensure maximum compatibility. + +A number of backward incompatible changes are planned. These +changes are individually small, but together should result +in a large improvement in usability. + +### cowboy_req + +The interface of `cowboy_req` will be largely changed. The +number one complaint about Cowboy today is that you have +to keep track of the Req whenever you do anything. The new +interface will minimize that. + +All functions will return a single term, excluding the body +reading functions `body/{1,2}`, `body_qs/{1,2}`, `part/{1,2}`, +`part_body/{1,2}`. + +Of the functions returning a single term, some of them will +return a Req object. This includes the functions that already +return Req: `compact/1`, `delete_resp_header/2`, `set_meta/3`, +`set_resp_body/2`, `set_resp_body_fun/{2,3}`, `set_resp_cookie/4`, +`set_resp_header/3`, and adds the `chunked_reply/{2,3}` and +`reply/{2,3,4}` functions to the list. + +Of note is that this will allow chaining all the response +functions if that's what you fancy. + +The `parse_header/{2,3}` function will now only return the +parsed header value, and crash on error. It will also not +cache the parsed value anymore, except for headers that Cowboy +requires, like the connection header. + +It is unsure what will become of the `qs_val`, `qs_vals`, +`cookie` and `cookies` functions. The main idea at this point +is to replace them with a `parse_qs/2` and `parse_cookies/2` +that would return the parsed list, and let the user decide +how to access it. + +### init/terminate unification + +The first argument of the `init/3` function is too rarely used. +It will be removed. + +The return value of the `init/2` function will become +`{http, Req, State} | {loop, Req, State} | {Module, Req, State}` +with `Module` being `cowboy_rest`, `cowboy_websocket` or a +user provided module. 
+ +The `rest_init` and `websocket_init` callbacks will be removed +as they become unnecessary with the new `init/2` interface. + +Similarly, the `rest_terminate` and `websocket_terminate` +callbacks will be removed in favor of a unified `terminate/3`. + +The `terminate/3` callback will become optional. + +### Middlewares + +The error tuple return value brings little value compared to +the halt tuple. The error tuple will therefore be removed. + +### Hooks + +The `onrequest` hook will be removed. It can easily be replaced +by a middleware. + +The interface of the `onresponse` hook will change. There has +been a number of issues and added complexity with the current +interface that warrant fixing. The main problem is that the +hook may be used to change the reply, by calling the reply +function again, forcing us to be careful not to reprocess +everything again. + +To fix that, we will cut the reply mechanism in two steps, +one that is basically some preprocessing of the response +header to follow the protocol requirements, and then the +actual response. The `onresponse` hook will fit in the +middle, being called from the first step and calling the +second step itself. + +If a body streaming function is provided, the hook will +also receive it (unlike today). It will not be able to +inspect its contents however. + +This should greatly simplify the code and allow users to +do any operation they wish. + +### Low-level interface documented + +A special chapter of the manual will document a low-level +interface that may be used in middlewares or hooks (but +nowhere else). This includes the Req access and update +functions and the new response function described above. + +### REST + +The `known_content_type` callback has no purpose, so it +is going to be removed. + +The documentation for all REST callbacks will be updated +to describe whether they can have side effects. This will +allows us to build introspection tools on top of a working +REST API. 
+ +Range support will be added. + +Under consideration +------------------- + + * Convenience API for extracting query string and body + information, similar to PHP's $_GET, $_POST and $_FILES diff --git a/rabbitmq-server/deps/cowboy/all.sh b/rabbitmq-server/deps/cowboy/all.sh new file mode 100755 index 0000000..32b8302 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/all.sh @@ -0,0 +1,19 @@ +#!/bin/sh + +KERL_INSTALL_PATH=~/erlang +KERL_RELEASES="r16b01 r16b02 r16b03-1 17.0 17.1.2" + +make build-ct-suites + +for rel in $KERL_RELEASES +do + echo + echo " TESTING $rel" + echo + . $KERL_INSTALL_PATH/$rel/activate + cp ~/.kerl/builds/$rel/otp_src_*/lib/ssl/test/erl_make_certs.erl \ + deps/ct_helper/src/ + CT_OPTS="-label $rel" make tests +done + +xdg-open logs/all_runs.html diff --git a/rabbitmq-server/deps/cowboy/circle.yml b/rabbitmq-server/deps/cowboy/circle.yml new file mode 100644 index 0000000..fa31fe2 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/circle.yml @@ -0,0 +1,22 @@ +general: + artifacts: + - "logs" + +dependencies: + cache_directories: + - "~/.kerl" + - "~/erlang" + + pre: + - sudo pip install autobahntestsuite + - sudo apt-get install autoconf2.59 + - cd $HOME/bin && ln -s /usr/bin/autoconf2.59 autoconf + - cd $HOME/bin && ln -s /usr/bin/autoheader2.59 autoheader + - make ci-prepare: + timeout: 7200 + +test: + override: + - source $HOME/erlang/OTP-18.0.2/activate && make dialyze + - make -k ci: + timeout: 3600 diff --git a/rabbitmq-server/deps/cowboy/erlang.mk b/rabbitmq-server/deps/cowboy/erlang.mk new file mode 100644 index 0000000..8930dfc --- /dev/null +++ b/rabbitmq-server/deps/cowboy/erlang.mk @@ -0,0 +1 @@ +include ../../erlang.mk diff --git a/rabbitmq-server/deps/cowboy/rebar.config b/rabbitmq-server/deps/cowboy/rebar.config new file mode 100644 index 0000000..b9b2ba0 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/rebar.config @@ -0,0 +1,4 @@ +{deps, [ + {cowlib, ".*", {git, "https://github.com/ninenines/cowlib.git", "1.0.0"}}, + {ranch, 
".*", {git, "https://github.com/ninenines/ranch.git", "1.0.0"}} +]}. diff --git a/rabbitmq-server/deps/cowboy/src/cowboy.app.src b/rabbitmq-server/deps/cowboy/src/cowboy.app.src new file mode 100644 index 0000000..b3f3c56 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy.app.src @@ -0,0 +1,9 @@ +{application,cowboy, + [{description,"Small, fast, modular HTTP server."}, + {vsn,"1.0.3"}, + {id,"git"}, + {modules,[]}, + {registered,[cowboy_clock,cowboy_sup]}, + {applications,[kernel,stdlib,ranch,cowlib,crypto]}, + {mod,{cowboy_app,[]}}, + {env,[]}]}. diff --git a/rabbitmq-server/deps/cowboy/src/cowboy.erl b/rabbitmq-server/deps/cowboy/src/cowboy.erl new file mode 100644 index 0000000..2b50dfb --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy.erl @@ -0,0 +1,75 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy). + +-export([start_http/4]). +-export([start_https/4]). +-export([start_spdy/4]). +-export([stop_listener/1]). +-export([set_env/3]). + +-type http_headers() :: [{binary(), iodata()}]. +-export_type([http_headers/0]). + +-type http_status() :: non_neg_integer() | binary(). +-export_type([http_status/0]). + +-type http_version() :: 'HTTP/1.1' | 'HTTP/1.0'. +-export_type([http_version/0]). 
+ +-type onrequest_fun() :: fun((Req) -> Req). +-export_type([onrequest_fun/0]). + +-type onresponse_fun() :: + fun((http_status(), http_headers(), iodata(), Req) -> Req). +-export_type([onresponse_fun/0]). + +-spec start_http(ranch:ref(), non_neg_integer(), ranch_tcp:opts(), + cowboy_protocol:opts()) -> {ok, pid()} | {error, any()}. +start_http(Ref, NbAcceptors, TransOpts, ProtoOpts) + when is_integer(NbAcceptors), NbAcceptors > 0 -> + ranch:start_listener(Ref, NbAcceptors, + ranch_tcp, TransOpts, cowboy_protocol, ProtoOpts). + +-spec start_https(ranch:ref(), non_neg_integer(), ranch_ssl:opts(), + cowboy_protocol:opts()) -> {ok, pid()} | {error, any()}. +start_https(Ref, NbAcceptors, TransOpts, ProtoOpts) + when is_integer(NbAcceptors), NbAcceptors > 0 -> + ranch:start_listener(Ref, NbAcceptors, + ranch_ssl, TransOpts, cowboy_protocol, ProtoOpts). + +-spec start_spdy(ranch:ref(), non_neg_integer(), ranch_ssl:opts(), + cowboy_spdy:opts()) -> {ok, pid()} | {error, any()}. +start_spdy(Ref, NbAcceptors, TransOpts, ProtoOpts) + when is_integer(NbAcceptors), NbAcceptors > 0 -> + TransOpts2 = [ + {connection_type, supervisor}, + {next_protocols_advertised, + [<<"spdy/3">>, <<"http/1.1">>, <<"http/1.0">>]} + |TransOpts], + ranch:start_listener(Ref, NbAcceptors, + ranch_ssl, TransOpts2, cowboy_spdy, ProtoOpts). + +-spec stop_listener(ranch:ref()) -> ok | {error, not_found}. +stop_listener(Ref) -> + ranch:stop_listener(Ref). + +-spec set_env(ranch:ref(), atom(), any()) -> ok. +set_env(Ref, Name, Value) -> + Opts = ranch:get_protocol_options(Ref), + {_, Env} = lists:keyfind(env, 1, Opts), + Opts2 = lists:keyreplace(env, 1, Opts, + {env, lists:keystore(Name, 1, Env, {Name, Value})}), + ok = ranch:set_protocol_options(Ref, Opts2). 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.app.src b/rabbitmq-server/deps/cowboy/src/cowboy_app.erl similarity index 71% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.app.src rename to rabbitmq-server/deps/cowboy/src/cowboy_app.erl index 9b3ee50..1161d91 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.app.src +++ b/rabbitmq-server/deps/cowboy/src/cowboy_app.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011, Loïc Hoguin +%% Copyright (c) 2011-2014, Loïc Hoguin %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -12,15 +12,16 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -{application, cowboy, [ - {description, "Small, fast, modular HTTP server."}, - {vsn, "0.5.0"}, - {modules, []}, - {registered, [cowboy_clock, cowboy_sup]}, - {applications, [ - kernel, - stdlib - ]}, - {mod, {cowboy_app, []}}, - {env, []} -]}. +-module(cowboy_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +-spec start(_, _) -> {ok, pid()}. +start(_, _) -> + cowboy_sup:start_link(). + +-spec stop(_) -> ok. +stop(_) -> + ok. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_bstr.erl b/rabbitmq-server/deps/cowboy/src/cowboy_bstr.erl similarity index 54% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_bstr.erl rename to rabbitmq-server/deps/cowboy/src/cowboy_bstr.erl index 1c702ef..98d2cf7 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_bstr.erl +++ b/rabbitmq-server/deps/cowboy/src/cowboy_bstr.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011, Loïc Hoguin +%% Copyright (c) 2011-2014, Loïc Hoguin %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -12,20 +12,42 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -%% @doc Binary string manipulation. -module(cowboy_bstr). --export([to_lower/1]). %% Binary strings. --export([char_to_lower/1, char_to_upper/1]). %% Characters. +%% Binary strings. +-export([capitalize_token/1]). +-export([to_lower/1]). +-export([to_upper/1]). -%% @doc Convert a binary string to lowercase. --spec to_lower(binary()) -> binary(). -to_lower(L) -> - << << (char_to_lower(C)) >> || << C >> <= L >>. +%% Characters. +-export([char_to_lower/1]). +-export([char_to_upper/1]). + +%% The first letter and all letters after a dash are capitalized. +%% This is the form seen for header names in the HTTP/1.1 RFC and +%% others. Note that using this form isn't required, as header names +%% are case insensitive, and it is only provided for use with eventual +%% badly implemented clients. +-spec capitalize_token(B) -> B when B::binary(). +capitalize_token(B) -> + capitalize_token(B, true, <<>>). 
+capitalize_token(<<>>, _, Acc) -> + Acc; +capitalize_token(<< $-, Rest/bits >>, _, Acc) -> + capitalize_token(Rest, true, << Acc/binary, $- >>); +capitalize_token(<< C, Rest/bits >>, true, Acc) -> + capitalize_token(Rest, false, << Acc/binary, (char_to_upper(C)) >>); +capitalize_token(<< C, Rest/bits >>, false, Acc) -> + capitalize_token(Rest, false, << Acc/binary, (char_to_lower(C)) >>). + +-spec to_lower(B) -> B when B::binary(). +to_lower(B) -> + << << (char_to_lower(C)) >> || << C >> <= B >>. + +-spec to_upper(B) -> B when B::binary(). +to_upper(B) -> + << << (char_to_upper(C)) >> || << C >> <= B >>. -%% @doc Convert [A-Z] characters to lowercase. -%% @end -%% We gain noticeable speed by matching each value directly. -spec char_to_lower(char()) -> char(). char_to_lower($A) -> $a; char_to_lower($B) -> $b; @@ -55,7 +77,6 @@ char_to_lower($Y) -> $y; char_to_lower($Z) -> $z; char_to_lower(Ch) -> Ch. -%% @doc Convert [a-z] characters to uppercase. -spec char_to_upper(char()) -> char(). char_to_upper($a) -> $A; char_to_upper($b) -> $B; @@ -84,3 +105,19 @@ char_to_upper($x) -> $X; char_to_upper($y) -> $Y; char_to_upper($z) -> $Z; char_to_upper(Ch) -> Ch. + +%% Tests. + +-ifdef(TEST). +capitalize_token_test_() -> + Tests = [ + {<<"heLLo-woRld">>, <<"Hello-World">>}, + {<<"Sec-Websocket-Version">>, <<"Sec-Websocket-Version">>}, + {<<"Sec-WebSocket-Version">>, <<"Sec-Websocket-Version">>}, + {<<"sec-websocket-version">>, <<"Sec-Websocket-Version">>}, + {<<"SEC-WEBSOCKET-VERSION">>, <<"Sec-Websocket-Version">>}, + {<<"Sec-WebSocket--Version">>, <<"Sec-Websocket--Version">>}, + {<<"Sec-WebSocket---Version">>, <<"Sec-Websocket---Version">>} + ], + [{H, fun() -> R = capitalize_token(H) end} || {H, R} <- Tests]. +-endif. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_clock.erl b/rabbitmq-server/deps/cowboy/src/cowboy_clock.erl similarity index 74% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_clock.erl rename to rabbitmq-server/deps/cowboy/src/cowboy_clock.erl index c699f4f..5d9a41a 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_clock.erl +++ b/rabbitmq-server/deps/cowboy/src/cowboy_clock.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011, Loïc Hoguin +%% Copyright (c) 2011-2014, Loïc Hoguin %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -12,8 +12,6 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -%% @doc Date and time related functions. -%% %% While a gen_server process runs in the background to update %% the cache of formatted dates every second, all API calls are %% local and directly read from the ETS cache table, providing @@ -21,110 +19,86 @@ -module(cowboy_clock). -behaviour(gen_server). --export([start_link/0, stop/0, rfc1123/0, rfc2109/1]). %% API. --export([init/1, handle_call/3, handle_cast/2, - handle_info/2, terminate/2, code_change/3]). %% gen_server. +%% API. +-export([start_link/0]). +-export([stop/0]). +-export([rfc1123/0]). +-export([rfc1123/1]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). -record(state, { universaltime = undefined :: undefined | calendar:datetime(), rfc1123 = <<>> :: binary(), - tref = undefined :: undefined | timer:tref() + tref = undefined :: undefined | reference() }). --define(SERVER, ?MODULE). --define(TABLE, ?MODULE). - --include_lib("eunit/include/eunit.hrl"). - %% API. -%% @private -spec start_link() -> {ok, pid()}. 
start_link() -> - gen_server:start_link({local, ?SERVER}, ?MODULE, [], []). + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). -%% @private -spec stop() -> stopped. stop() -> - gen_server:call(?SERVER, stop). + gen_server:call(?MODULE, stop). -%% @doc Return the current date and time formatted according to RFC-1123. -%% -%% This format is used in the 'Date' header sent with HTTP responses. -spec rfc1123() -> binary(). rfc1123() -> - ets:lookup_element(?TABLE, rfc1123, 2). + ets:lookup_element(?MODULE, rfc1123, 2). -%% @doc Return the current date and time formatted according to RFC-2109. -%% -%% This format is used in the 'Set-Cookie' header sent with -%% HTTP responses. --spec rfc2109(calendar:datetime()) -> binary(). -rfc2109(LocalTime) -> - {{YYYY,MM,DD},{Hour,Min,Sec}} = - case calendar:local_time_to_universal_time_dst(LocalTime) of - [Gmt] -> Gmt; - [_,Gmt] -> Gmt - end, - Wday = calendar:day_of_the_week({YYYY,MM,DD}), - DayBin = pad_int(DD), - YearBin = list_to_binary(integer_to_list(YYYY)), - HourBin = pad_int(Hour), - MinBin = pad_int(Min), - SecBin = pad_int(Sec), - WeekDay = weekday(Wday), - Month = month(MM), - <>. +-spec rfc1123(calendar:datetime()) -> binary(). +rfc1123(DateTime) -> + update_rfc1123(<<>>, undefined, DateTime). %% gen_server. -%% @private -spec init([]) -> {ok, #state{}}. init([]) -> - ?TABLE = ets:new(?TABLE, [set, protected, + ?MODULE = ets:new(?MODULE, [set, protected, named_table, {read_concurrency, true}]), T = erlang:universaltime(), B = update_rfc1123(<<>>, undefined, T), - {ok, TRef} = timer:send_interval(1000, update), - ets:insert(?TABLE, {rfc1123, B}), + TRef = erlang:send_after(1000, self(), update), + ets:insert(?MODULE, {rfc1123, B}), {ok, #state{universaltime=T, rfc1123=B, tref=TRef}}. -%% @private --spec handle_call(_, _, State) - -> {reply, ignored, State} | {stop, normal, stopped, State}. 
-handle_call(stop, _From, State=#state{tref=TRef}) -> - {ok, cancel} = timer:cancel(TRef), +-type from() :: {pid(), term()}. +-spec handle_call + (stop, from(), State) -> {stop, normal, stopped, State} + when State::#state{}. +handle_call(stop, _From, State) -> {stop, normal, stopped, State}; handle_call(_Request, _From, State) -> {reply, ignored, State}. -%% @private --spec handle_cast(_, State) -> {noreply, State}. +-spec handle_cast(_, State) -> {noreply, State} when State::#state{}. handle_cast(_Msg, State) -> {noreply, State}. -%% @private --spec handle_info(_, State) -> {noreply, State}. -handle_info(update, #state{universaltime=Prev, rfc1123=B1, tref=TRef}) -> +-spec handle_info(any(), State) -> {noreply, State} when State::#state{}. +handle_info(update, #state{universaltime=Prev, rfc1123=B1, tref=TRef0}) -> + %% Cancel the timer in case an external process sent an update message. + _ = erlang:cancel_timer(TRef0), T = erlang:universaltime(), B2 = update_rfc1123(B1, Prev, T), - ets:insert(?TABLE, {rfc1123, B2}), + ets:insert(?MODULE, {rfc1123, B2}), + TRef = erlang:send_after(1000, self(), update), {noreply, #state{universaltime=T, rfc1123=B2, tref=TRef}}; handle_info(_Info, State) -> {noreply, State}. -%% @private -spec terminate(_, _) -> ok. terminate(_Reason, _State) -> ok. -%% @private --spec code_change(_, State, _) -> {ok, State}. +-spec code_change(_, State, _) -> {ok, State} when State::#state{}. code_change(_OldVsn, State, _Extra) -> {ok, State}. @@ -159,7 +133,7 @@ update_rfc1123(<< _:11/binary, Keep:6/binary, _/bits >>, update_rfc1123(_, _, {Date = {Y, Mo, D}, {H, M, S}}) -> Wday = calendar:day_of_the_week(Date), << (weekday(Wday))/binary, ", ", (pad_int(D))/binary, " ", - (month(Mo))/binary, " ", (list_to_binary(integer_to_list(Y)))/binary, + (month(Mo))/binary, " ", (integer_to_binary(Y))/binary, " ", (pad_int(H))/binary, $:, (pad_int(M))/binary, $:, (pad_int(S))/binary, " GMT" >>. 
@@ -168,7 +142,7 @@ update_rfc1123(_, _, {Date = {Y, Mo, D}, {H, M, S}}) -> pad_int(X) when X < 10 -> << $0, ($0 + X) >>; pad_int(X) -> - list_to_binary(integer_to_list(X)). + integer_to_binary(X). -spec weekday(1..7) -> <<_:24>>. weekday(1) -> <<"Mon">>; @@ -196,7 +170,6 @@ month(12) -> <<"Dec">>. %% Tests. -ifdef(TEST). - update_rfc1123_test_() -> Tests = [ {<<"Sat, 14 May 2011 14:25:33 GMT">>, undefined, @@ -237,5 +210,4 @@ pad_int_test_() -> {56, <<"56">>}, {57, <<"57">>}, {58, <<"58">>}, {59, <<"59">>} ], [{I, fun() -> O = pad_int(I) end} || {I, O} <- Tests]. - -endif. diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_handler.erl b/rabbitmq-server/deps/cowboy/src/cowboy_handler.erl new file mode 100644 index 0000000..5eb16b4 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_handler.erl @@ -0,0 +1,304 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% Handler middleware. +%% +%% Execute the handler given by the handler and handler_opts +%% environment values. The result of this execution is added to the +%% environment under the result value. +%% +%% When using loop handlers, we are receiving data from the socket because we +%% want to know when the socket gets closed. 
This is generally not an issue +%% because these kinds of requests are generally not pipelined, and don't have +%% a body. If they do have a body, this body is often read in the +%% init/3 callback and this is no problem. Otherwise, this data +%% accumulates in a buffer until we reach a certain threshold of 5000 bytes +%% by default. This can be configured through the loop_max_buffer +%% environment value. The request will be terminated with an +%% {error, overflow} reason if this threshold is reached. +-module(cowboy_handler). +-behaviour(cowboy_middleware). + +-export([execute/2]). +-export([handler_loop/4]). + +-record(state, { + env :: cowboy_middleware:env(), + hibernate = false :: boolean(), + loop_buffer_size = 0 :: non_neg_integer(), + loop_max_buffer = 5000 :: non_neg_integer() | infinity, + loop_timeout = infinity :: timeout(), + loop_timeout_ref = undefined :: undefined | reference(), + resp_sent = false :: boolean() +}). + +-spec execute(Req, Env) + -> {ok, Req, Env} | {suspend, ?MODULE, handler_loop, [any()]} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +execute(Req, Env) -> + {_, Handler} = lists:keyfind(handler, 1, Env), + {_, HandlerOpts} = lists:keyfind(handler_opts, 1, Env), + MaxBuffer = case lists:keyfind(loop_max_buffer, 1, Env) of + false -> 5000; + {_, MaxBuffer0} -> MaxBuffer0 + end, + handler_init(Req, #state{env=Env, loop_max_buffer=MaxBuffer}, + Handler, HandlerOpts). + +-spec handler_init(Req, #state{}, module(), any()) + -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). 
+handler_init(Req, State, Handler, HandlerOpts) -> + Transport = cowboy_req:get(transport, Req), + try Handler:init({Transport:name(), http}, Req, HandlerOpts) of + {ok, Req2, HandlerState} -> + handler_handle(Req2, State, Handler, HandlerState); + {loop, Req2, HandlerState} -> + handler_after_callback(Req2, State, Handler, HandlerState); + {loop, Req2, HandlerState, hibernate} -> + handler_after_callback(Req2, State#state{hibernate=true}, + Handler, HandlerState); + {loop, Req2, HandlerState, Timeout} -> + State2 = handler_loop_timeout(State#state{loop_timeout=Timeout}), + handler_after_callback(Req2, State2, Handler, HandlerState); + {loop, Req2, HandlerState, Timeout, hibernate} -> + State2 = handler_loop_timeout(State#state{ + hibernate=true, loop_timeout=Timeout}), + handler_after_callback(Req2, State2, Handler, HandlerState); + {shutdown, Req2, HandlerState} -> + terminate_request(Req2, State, Handler, HandlerState, + {normal, shutdown}); + {upgrade, protocol, Module} -> + upgrade_protocol(Req, State, Handler, HandlerOpts, Module); + {upgrade, protocol, Module, Req2, HandlerOpts2} -> + upgrade_protocol(Req2, State, Handler, HandlerOpts2, Module) + catch Class:Reason -> + Stacktrace = erlang:get_stacktrace(), + cowboy_req:maybe_reply(Stacktrace, Req), + erlang:Class([ + {reason, Reason}, + {mfa, {Handler, init, 3}}, + {stacktrace, Stacktrace}, + {req, cowboy_req:to_list(Req)}, + {opts, HandlerOpts} + ]) + end. + +-spec upgrade_protocol(Req, #state{}, module(), any(), module()) + -> {ok, Req, Env} + | {suspend, module(), atom(), any()} + | {halt, Req} + | {error, cowboy:http_status(), Req} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +upgrade_protocol(Req, #state{env=Env}, + Handler, HandlerOpts, Module) -> + Module:upgrade(Req, Env, Handler, HandlerOpts). + +-spec handler_handle(Req, #state{}, module(), any()) + -> {ok, Req, cowboy_middleware:env()} when Req::cowboy_req:req(). 
+handler_handle(Req, State, Handler, HandlerState) -> + try Handler:handle(Req, HandlerState) of + {ok, Req2, HandlerState2} -> + terminate_request(Req2, State, Handler, HandlerState2, + {normal, shutdown}) + catch Class:Reason -> + Stacktrace = erlang:get_stacktrace(), + cowboy_req:maybe_reply(Stacktrace, Req), + handler_terminate(Req, Handler, HandlerState, Reason), + erlang:Class([ + {reason, Reason}, + {mfa, {Handler, handle, 2}}, + {stacktrace, Stacktrace}, + {req, cowboy_req:to_list(Req)}, + {state, HandlerState} + ]) + end. + +%% Update the state if the response was sent in the callback. +-spec handler_after_callback(Req, #state{}, module(), any()) + -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +handler_after_callback(Req, State=#state{resp_sent=false}, Handler, + HandlerState) -> + receive + {cowboy_req, resp_sent} -> + handler_before_loop(Req, State#state{resp_sent=true}, Handler, + HandlerState) + after 0 -> + handler_before_loop(Req, State, Handler, HandlerState) + end; +handler_after_callback(Req, State, Handler, HandlerState) -> + handler_before_loop(Req, State, Handler, HandlerState). + +-spec handler_before_loop(Req, #state{}, module(), any()) + -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +handler_before_loop(Req, State=#state{hibernate=true}, Handler, HandlerState) -> + [Socket, Transport] = cowboy_req:get([socket, transport], Req), + Transport:setopts(Socket, [{active, once}]), + {suspend, ?MODULE, handler_loop, + [Req, State#state{hibernate=false}, Handler, HandlerState]}; +handler_before_loop(Req, State, Handler, HandlerState) -> + [Socket, Transport] = cowboy_req:get([socket, transport], Req), + Transport:setopts(Socket, [{active, once}]), + handler_loop(Req, State, Handler, HandlerState). + +%% Almost the same code can be found in cowboy_websocket. +-spec handler_loop_timeout(#state{}) -> #state{}. 
+handler_loop_timeout(State=#state{loop_timeout=infinity}) -> + State#state{loop_timeout_ref=undefined}; +handler_loop_timeout(State=#state{loop_timeout=Timeout, + loop_timeout_ref=PrevRef}) -> + _ = case PrevRef of + undefined -> ignore; + PrevRef -> erlang:cancel_timer(PrevRef) + end, + TRef = erlang:start_timer(Timeout, self(), ?MODULE), + State#state{loop_timeout_ref=TRef}. + +-spec handler_loop(Req, #state{}, module(), any()) + -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +handler_loop(Req, State=#state{loop_buffer_size=NbBytes, + loop_max_buffer=Threshold, loop_timeout_ref=TRef, + resp_sent=RespSent}, Handler, HandlerState) -> + [Socket, Transport] = cowboy_req:get([socket, transport], Req), + {OK, Closed, Error} = Transport:messages(), + receive + {OK, Socket, Data} -> + NbBytes2 = NbBytes + byte_size(Data), + if NbBytes2 > Threshold -> + _ = handler_terminate(Req, Handler, HandlerState, + {error, overflow}), + _ = if RespSent -> ok; true -> + cowboy_req:reply(500, Req) + end, + exit(normal); + true -> + Req2 = cowboy_req:append_buffer(Data, Req), + State2 = handler_loop_timeout(State#state{ + loop_buffer_size=NbBytes2}), + handler_before_loop(Req2, State2, Handler, HandlerState) + end; + {Closed, Socket} -> + terminate_request(Req, State, Handler, HandlerState, + {error, closed}); + {Error, Socket, Reason} -> + terminate_request(Req, State, Handler, HandlerState, + {error, Reason}); + {timeout, TRef, ?MODULE} -> + handler_after_loop(Req, State, Handler, HandlerState, + {normal, timeout}); + {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) -> + handler_loop(Req, State, Handler, HandlerState); + Message -> + %% We set the socket back to {active, false} mode in case + %% the handler is going to call recv. We also flush any + %% data received after that and put it into the buffer. + %% We do not check the size here, if data keeps coming + %% we'll error out on the next packet received. 
+ Transport:setopts(Socket, [{active, false}]), + Req2 = receive {OK, Socket, Data} -> + cowboy_req:append_buffer(Data, Req) + after 0 -> + Req + end, + handler_call(Req2, State, Handler, HandlerState, Message) + end. + +-spec handler_call(Req, #state{}, module(), any(), any()) + -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +handler_call(Req, State=#state{resp_sent=RespSent}, + Handler, HandlerState, Message) -> + try Handler:info(Message, Req, HandlerState) of + {ok, Req2, HandlerState2} -> + handler_after_loop(Req2, State, Handler, HandlerState2, + {normal, shutdown}); + {loop, Req2, HandlerState2} -> + handler_after_callback(Req2, State, Handler, HandlerState2); + {loop, Req2, HandlerState2, hibernate} -> + handler_after_callback(Req2, State#state{hibernate=true}, + Handler, HandlerState2) + catch Class:Reason -> + Stacktrace = erlang:get_stacktrace(), + if RespSent -> ok; true -> + cowboy_req:maybe_reply(Stacktrace, Req) + end, + handler_terminate(Req, Handler, HandlerState, Reason), + erlang:Class([ + {reason, Reason}, + {mfa, {Handler, info, 3}}, + {stacktrace, Stacktrace}, + {req, cowboy_req:to_list(Req)}, + {state, HandlerState} + ]) + end. + +%% It is sometimes important to make a socket passive as it was initially +%% and as it is expected to be by cowboy_protocol, right after we're done +%% with loop handling. The browser may freely pipeline a bunch of requests +%% if previous one was, say, a JSONP long-polling request. +-spec handler_after_loop(Req, #state{}, module(), any(), + {normal, timeout | shutdown} | {error, atom()}) -> + {ok, Req, cowboy_middleware:env()} when Req::cowboy_req:req(). 
+handler_after_loop(Req, State, Handler, HandlerState, Reason) -> + [Socket, Transport] = cowboy_req:get([socket, transport], Req), + Transport:setopts(Socket, [{active, false}]), + {OK, _Closed, _Error} = Transport:messages(), + Req2 = receive + {OK, Socket, Data} -> + cowboy_req:append_buffer(Data, Req) + after 0 -> + Req + end, + terminate_request(Req2, State, Handler, HandlerState, Reason). + +-spec terminate_request(Req, #state{}, module(), any(), + {normal, timeout | shutdown} | {error, atom()}) -> + {ok, Req, cowboy_middleware:env()} when Req::cowboy_req:req(). +terminate_request(Req, #state{env=Env, loop_timeout_ref=TRef}, + Handler, HandlerState, Reason) -> + HandlerRes = handler_terminate(Req, Handler, HandlerState, Reason), + _ = case TRef of + undefined -> ignore; + TRef -> erlang:cancel_timer(TRef) + end, + flush_timeouts(), + {ok, Req, [{result, HandlerRes}|Env]}. + +-spec handler_terminate(cowboy_req:req(), module(), any(), + {normal, timeout | shutdown} | {error, atom()}) -> ok. +handler_terminate(Req, Handler, HandlerState, Reason) -> + try + Handler:terminate(Reason, cowboy_req:lock(Req), HandlerState) + catch Class:Reason2 -> + erlang:Class([ + {reason, Reason2}, + {mfa, {Handler, terminate, 3}}, + {stacktrace, erlang:get_stacktrace()}, + {req, cowboy_req:to_list(Req)}, + {state, HandlerState}, + {terminate_reason, Reason} + ]) + end. + +-spec flush_timeouts() -> ok. +flush_timeouts() -> + receive + {timeout, TRef, ?MODULE} when is_reference(TRef) -> + flush_timeouts() + after 0 -> + ok + end. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http.erl b/rabbitmq-server/deps/cowboy/src/cowboy_http.erl similarity index 69% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http.erl rename to rabbitmq-server/deps/cowboy/src/cowboy_http.erl index d7261c8..e2760e9 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http.erl +++ b/rabbitmq-server/deps/cowboy/src/cowboy_http.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011, Loïc Hoguin +%% Copyright (c) 2011-2014, Loïc Hoguin %% Copyright (c) 2011, Anthony Ramine %% %% Permission to use, copy, modify, and/or distribute this software for any @@ -13,51 +13,37 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -%% @doc Core HTTP parsing API. +%% Deprecated HTTP parsing API. -module(cowboy_http). %% Parsing. --export([list/2, nonempty_list/2, content_type/1, content_type_params/3, - media_range/2, conneg/2, language_range/2, entity_tag_match/1, - http_date/1, rfc1123_date/1, rfc850_date/1, asctime_date/1, - whitespace/2, digits/1, token/2, token_ci/2, quoted_string/2]). - -%% Interpretation. --export([connection_to_atom/1, urldecode/1, urldecode/2, urlencode/1, - urlencode/2]). - --type method() :: 'OPTIONS' | 'GET' | 'HEAD' - | 'POST' | 'PUT' | 'DELETE' | 'TRACE' | binary(). --type uri() :: '*' | {absoluteURI, http | https, Host::binary(), - Port::integer() | undefined, Path::binary()} - | {scheme, Scheme::binary(), binary()} - | {abs_path, binary()} | binary(). --type version() :: {Major::non_neg_integer(), Minor::non_neg_integer()}. 
--type header() :: 'Cache-Control' | 'Connection' | 'Date' | 'Pragma' - | 'Transfer-Encoding' | 'Upgrade' | 'Via' | 'Accept' | 'Accept-Charset' - | 'Accept-Encoding' | 'Accept-Language' | 'Authorization' | 'From' | 'Host' - | 'If-Modified-Since' | 'If-Match' | 'If-None-Match' | 'If-Range' - | 'If-Unmodified-Since' | 'Max-Forwards' | 'Proxy-Authorization' | 'Range' - | 'Referer' | 'User-Agent' | 'Age' | 'Location' | 'Proxy-Authenticate' - | 'Public' | 'Retry-After' | 'Server' | 'Vary' | 'Warning' - | 'Www-Authenticate' | 'Allow' | 'Content-Base' | 'Content-Encoding' - | 'Content-Language' | 'Content-Length' | 'Content-Location' - | 'Content-Md5' | 'Content-Range' | 'Content-Type' | 'Etag' - | 'Expires' | 'Last-Modified' | 'Accept-Ranges' | 'Set-Cookie' - | 'Set-Cookie2' | 'X-Forwarded-For' | 'Cookie' | 'Keep-Alive' - | 'Proxy-Connection' | binary(). --type fake_iodata() :: iolist() | binary(). --type headers() :: [{header(), fake_iodata()}]. --type status() :: non_neg_integer() | binary(). - --export_type([method/0, uri/0, version/0, header/0, headers/0, status/0]). - --include("include/http.hrl"). --include_lib("eunit/include/eunit.hrl"). +-export([list/2]). +-export([nonempty_list/2]). +-export([content_type/1]). +-export([media_range/2]). +-export([conneg/2]). +-export([language_range/2]). +-export([entity_tag_match/1]). +-export([expectation/2]). +-export([params/2]). +-export([http_date/1]). +-export([rfc1123_date/1]). +-export([rfc850_date/1]). +-export([asctime_date/1]). +-export([whitespace/2]). +-export([digits/1]). +-export([token/2]). +-export([token_ci/2]). +-export([quoted_string/2]). +-export([authorization/2]). +-export([range/1]). +-export([parameterized_tokens/1]). + +%% Decoding. +-export([ce_identity/1]). %% Parsing. -%% @doc Parse a non-empty list of the given type. -spec nonempty_list(binary(), fun()) -> [any(), ...] | {error, badarg}. 
nonempty_list(Data, Fun) -> case list(Data, Fun, []) of @@ -66,7 +52,6 @@ nonempty_list(Data, Fun) -> L -> lists:reverse(L) end. -%% @doc Parse a list of the given type. -spec list(binary(), fun()) -> list() | {error, badarg}. list(Data, Fun) -> case list(Data, Fun, []) of @@ -94,42 +79,27 @@ list(Data, Fun, Acc) -> end) end). -%% @doc Parse a content type. +%% We lowercase the charset header as we know it's case insensitive. -spec content_type(binary()) -> any(). content_type(Data) -> media_type(Data, fun (Rest, Type, SubType) -> - content_type_params(Rest, - fun (Params) -> {Type, SubType, Params} end, []) - end). - --spec content_type_params(binary(), fun(), list({binary(), binary()})) - -> any(). -content_type_params(Data, Fun, Acc) -> - whitespace(Data, - fun (<< $;, Rest/binary >>) -> content_type_param(Rest, Fun, Acc); - (<<>>) -> Fun(lists:reverse(Acc)); - (_Rest) -> {error, badarg} - end). - --spec content_type_param(binary(), fun(), list({binary(), binary()})) - -> any(). -content_type_param(Data, Fun, Acc) -> - whitespace(Data, - fun (Rest) -> - token_ci(Rest, - fun (_Rest2, <<>>) -> {error, badarg}; - (<< $=, Rest2/binary >>, Attr) -> - word(Rest2, - fun (Rest3, Value) -> - content_type_params(Rest3, Fun, - [{Attr, Value}|Acc]) - end); - (_Rest2, _Attr) -> {error, badarg} - end) + params(Rest, + fun (<<>>, Params) -> + case lists:keyfind(<<"charset">>, 1, Params) of + false -> + {Type, SubType, Params}; + {_, Charset} -> + Charset2 = cowboy_bstr:to_lower(Charset), + Params2 = lists:keyreplace(<<"charset">>, + 1, Params, {<<"charset">>, Charset2}), + {Type, SubType, Params2} + end; + (_Rest2, _) -> + {error, badarg} + end) end). -%% @doc Parse a media range. -spec media_range(binary(), fun()) -> any(). media_range(Data, Fun) -> media_type(Data, @@ -172,7 +142,6 @@ media_range_param_value(Data, Fun, Type, SubType, Acc, Attr) -> Type, SubType, [{Attr, Value}|Acc]) end). -%% @doc Parse a media type. -spec media_type(binary(), fun()) -> any(). 
media_type(Data, Fun) -> token_ci(Data, @@ -182,6 +151,13 @@ media_type(Data, Fun) -> fun (_Rest2, <<>>) -> {error, badarg}; (Rest2, SubType) -> Fun(Rest2, Type, SubType) end); + %% This is a non-strict parsing clause required by some user agents + %% that use * instead of */* in the list of media types. + (Rest, <<"*">> = Type) -> + token_ci(<<"*", Rest/binary>>, + fun (_Rest2, <<>>) -> {error, badarg}; + (Rest2, SubType) -> Fun(Rest2, Type, SubType) + end); (_Rest, _Type) -> {error, badarg} end). @@ -225,8 +201,6 @@ accept_ext_value(Data, Fun, Type, SubType, Params, Quality, Acc, Attr) -> Type, SubType, Params, Quality, [{Attr, Value}|Acc]) end). -%% @doc Parse a conneg header (Accept-Charset, Accept-Encoding), -%% followed by an optional quality value. -spec conneg(binary(), fun()) -> any(). conneg(Data, Fun) -> token_ci(Data, @@ -238,7 +212,6 @@ conneg(Data, Fun) -> end) end). -%% @doc Parse a language range, followed by an optional quality value. -spec language_range(binary(), fun()) -> any(). language_range(<< $*, Rest/binary >>, Fun) -> language_range_ret(Rest, Fun, '*'); @@ -268,7 +241,7 @@ language_tag(Data, Fun) -> -spec language_subtag(binary(), fun(), binary(), [binary()]) -> any(). language_subtag(Data, Fun, Tag, Acc) -> - alpha(Data, + alphanumeric(Data, fun (_Rest, SubTag) when byte_size(SubTag) =:= 0; byte_size(SubTag) > 8 -> {error, badarg}; (<< $-, Rest/binary >>, SubTag) -> @@ -285,18 +258,24 @@ maybe_qparam(Data, Fun) -> fun (<< $;, Rest/binary >>) -> whitespace(Rest, fun (Rest2) -> - qparam(Rest2, Fun) + %% This is a non-strict parsing clause required by some user agents + %% that use the wrong delimiter putting a charset where a qparam is + %% expected. + try qparam(Rest2, Fun) of + Result -> Result + catch + error:function_clause -> + Fun(<<",", Rest2/binary>>, 1000) + end end); (Rest) -> Fun(Rest, 1000) end). -%% @doc Parse a quality parameter string (for example q=0.500). -spec qparam(binary(), fun()) -> any(). 
qparam(<< Q, $=, Data/binary >>, Fun) when Q =:= $q; Q =:= $Q -> qvalue(Data, Fun). -%% @doc Parse either a list of entity tags or a "*". -spec entity_tag_match(binary()) -> any(). entity_tag_match(<< $*, Rest/binary >>) -> whitespace(Rest, @@ -306,7 +285,6 @@ entity_tag_match(<< $*, Rest/binary >>) -> entity_tag_match(Data) -> nonempty_list(Data, fun entity_tag/2). -%% @doc Parse an entity-tag. -spec entity_tag(binary(), fun()) -> any(). entity_tag(<< "W/", Rest/binary >>, Fun) -> opaque_tag(Rest, Fun, weak); @@ -320,9 +298,52 @@ opaque_tag(Data, Fun, Strength) -> (Rest, OpaqueTag) -> Fun(Rest, {Strength, OpaqueTag}) end). -%% @doc Parse an HTTP date (RFC1123, RFC850 or asctime date). -%% @end -%% +-spec expectation(binary(), fun()) -> any(). +expectation(Data, Fun) -> + token_ci(Data, + fun (_Rest, <<>>) -> {error, badarg}; + (<< $=, Rest/binary >>, Expectation) -> + word(Rest, + fun (Rest2, ExtValue) -> + params(Rest2, fun (Rest3, ExtParams) -> + Fun(Rest3, {Expectation, ExtValue, ExtParams}) + end) + end); + (Rest, Expectation) -> + Fun(Rest, Expectation) + end). + +-spec params(binary(), fun()) -> any(). +params(Data, Fun) -> + params(Data, Fun, []). + +-spec params(binary(), fun(), [{binary(), binary()}]) -> any(). +params(Data, Fun, Acc) -> + whitespace(Data, + fun (<< $;, Rest/binary >>) -> + param(Rest, + fun (Rest2, Attr, Value) -> + params(Rest2, Fun, [{Attr, Value}|Acc]) + end); + (Rest) -> + Fun(Rest, lists:reverse(Acc)) + end). + +-spec param(binary(), fun()) -> any(). +param(Data, Fun) -> + whitespace(Data, + fun (Rest) -> + token_ci(Rest, + fun (_Rest2, <<>>) -> {error, badarg}; + (<< $=, Rest2/binary >>, Attr) -> + word(Rest2, + fun (Rest3, Value) -> + Fun(Rest3, Attr, Value) + end); + (_Rest2, _Attr) -> {error, badarg} + end) + end). + %% While this may not be the most efficient date parsing we can do, %% it should work fine for our purposes because all HTTP dates should %% be sent as RFC1123 dates in HTTP/1.1. 
@@ -345,7 +366,6 @@ http_date(Data) -> HTTPDate end. -%% @doc Parse an RFC1123 date. -spec rfc1123_date(binary()) -> any(). rfc1123_date(Data) -> wkday(Data, @@ -365,7 +385,6 @@ rfc1123_date(Data) -> {error, badarg} end). -%% @doc Parse an RFC850 date. -spec rfc850_date(binary()) -> any(). %% From the RFC: %% HTTP/1.1 clients and caches SHOULD assume that an RFC-850 date @@ -389,7 +408,6 @@ rfc850_date(Data) -> {error, badarg} end). -%% @doc Parse an asctime date. -spec asctime_date(binary()) -> any(). asctime_date(Data) -> wkday(Data, @@ -407,7 +425,7 @@ asctime_date(Data) -> {error, badarg} end); (_Any, _WkDay) -> - {error, badarg1} + {error, badarg} end). -spec asctime_year(binary(), tuple(), tuple()) -> any(). @@ -548,7 +566,6 @@ time(<< H1, H2, ":", M1, M2, ":", S1, S2, Rest/binary >>, Fun) {error, badarg} end. -%% @doc Skip whitespace. -spec whitespace(binary(), fun()) -> any(). whitespace(<< C, Rest/binary >>, Fun) when C =:= $\s; C =:= $\t -> @@ -556,7 +573,6 @@ whitespace(<< C, Rest/binary >>, Fun) whitespace(Data, Fun) -> Fun(Data). -%% @doc Parse a list of digits as a non negative integer. -spec digits(binary()) -> non_neg_integer() | {error, badarg}. digits(Data) -> digits(Data, @@ -583,8 +599,6 @@ digits(<< C, Rest/binary >>, Fun, Acc) digits(Data, Fun, Acc) -> Fun(Data, Acc). -%% @doc Parse a list of case-insensitive alpha characters. -%% %% Changes all characters to lowercase. -spec alpha(binary(), fun()) -> any(). alpha(Data, Fun) -> @@ -601,6 +615,22 @@ alpha(<< C, Rest/binary >>, Fun, Acc) alpha(Data, Fun, Acc) -> Fun(Data, Acc). +-spec alphanumeric(binary(), fun()) -> any(). +alphanumeric(Data, Fun) -> + alphanumeric(Data, Fun, <<>>). + +-spec alphanumeric(binary(), fun(), binary()) -> any(). 
+alphanumeric(<<>>, Fun, Acc) -> + Fun(<<>>, Acc); +alphanumeric(<< C, Rest/binary >>, Fun, Acc) + when C >= $a andalso C =< $z; + C >= $A andalso C =< $Z; + C >= $0 andalso C =< $9 -> + C2 = cowboy_bstr:char_to_lower(C), + alphanumeric(Rest, Fun, << Acc/binary, C2 >>); +alphanumeric(Data, Fun, Acc) -> + Fun(Data, Acc). + %% @doc Parse either a token or a quoted string. -spec word(binary(), fun()) -> any(). word(Data = << $", _/binary >>, Fun) -> @@ -611,14 +641,11 @@ word(Data, Fun) -> (Rest, Token) -> Fun(Rest, Token) end). -%% @doc Parse a case-insensitive token. -%% %% Changes all characters to lowercase. -spec token_ci(binary(), fun()) -> any(). token_ci(Data, Fun) -> token(Data, Fun, ci, <<>>). -%% @doc Parse a token. -spec token(binary(), fun()) -> any(). token(Data, Fun) -> token(Data, Fun, cs, <<>>). @@ -639,10 +666,11 @@ token(<< C, Rest/binary >>, Fun, Case = ci, Acc) -> token(<< C, Rest/binary >>, Fun, Case, Acc) -> token(Rest, Fun, Case, << Acc/binary, C >>). -%% @doc Parse a quoted string. -spec quoted_string(binary(), fun()) -> any(). quoted_string(<< $", Rest/binary >>, Fun) -> - quoted_string(Rest, Fun, <<>>). + quoted_string(Rest, Fun, <<>>); +quoted_string(_, _Fun) -> + {error, badarg}. -spec quoted_string(binary(), fun(), binary()) -> any(). quoted_string(<<>>, _Fun, _Acc) -> @@ -654,10 +682,12 @@ quoted_string(<< $\\, C, Rest/binary >>, Fun, Acc) -> quoted_string(<< C, Rest/binary >>, Fun, Acc) -> quoted_string(Rest, Fun, << Acc/binary, C >>). -%% @doc Parse a quality value. -spec qvalue(binary(), fun()) -> any(). qvalue(<< $0, $., Rest/binary >>, Fun) -> qvalue(Rest, Fun, 0, 100); +%% Some user agents use q=.x instead of q=0.x +qvalue(<< $., Rest/binary >>, Fun) -> + qvalue(Rest, Fun, 0, 100); qvalue(<< $0, Rest/binary >>, Fun) -> Fun(Rest, 0); qvalue(<< $1, $., $0, $0, $0, Rest/binary >>, Fun) -> @@ -680,143 +710,193 @@ qvalue(<< C, Rest/binary >>, Fun, Q, M) qvalue(Data, Fun, Q, _M) -> Fun(Data, Q). 
+%% Only RFC2617 Basic authorization is supported so far. +-spec authorization(binary(), binary()) -> {binary(), any()} | {error, badarg}. +authorization(UserPass, Type = <<"basic">>) -> + whitespace(UserPass, + fun(D) -> + authorization_basic_userid(base64:mime_decode(D), + fun(Rest, Userid) -> + authorization_basic_password(Rest, + fun(Password) -> + {Type, {Userid, Password}} + end) + end) + end); +authorization(String, Type) -> + whitespace(String, fun(Rest) -> {Type, Rest} end). -%% Interpretation. +-spec authorization_basic_userid(binary(), fun()) -> any(). +authorization_basic_userid(Data, Fun) -> + authorization_basic_userid(Data, Fun, <<>>). -%% @doc Walk through a tokens list and return whether -%% the connection is keepalive or closed. -%% -%% The connection token is expected to be lower-case. --spec connection_to_atom([binary()]) -> keepalive | close. -connection_to_atom([]) -> - keepalive; -connection_to_atom([<<"keep-alive">>|_Tail]) -> - keepalive; -connection_to_atom([<<"close">>|_Tail]) -> - close; -connection_to_atom([_Any|Tail]) -> - connection_to_atom(Tail). - -%% @doc Decode a URL encoded binary. -%% @equiv urldecode(Bin, crash) --spec urldecode(binary()) -> binary(). -urldecode(Bin) when is_binary(Bin) -> - urldecode(Bin, <<>>, crash). - -%% @doc Decode a URL encoded binary. -%% The second argument specifies how to handle percent characters that are not -%% followed by two valid hex characters. Use `skip' to ignore such errors, -%% if `crash' is used the function will fail with the reason `badarg'. --spec urldecode(binary(), crash | skip) -> binary(). -urldecode(Bin, OnError) when is_binary(Bin) -> - urldecode(Bin, <<>>, OnError). - --spec urldecode(binary(), binary(), crash | skip) -> binary(). 
-urldecode(<<$%, H, L, Rest/binary>>, Acc, OnError) -> - G = unhex(H), - M = unhex(L), - if G =:= error; M =:= error -> - case OnError of skip -> ok; crash -> erlang:error(badarg) end, - urldecode(<>, <>, OnError); - true -> - urldecode(Rest, <>, OnError) - end; -urldecode(<<$%, Rest/binary>>, Acc, OnError) -> - case OnError of skip -> ok; crash -> erlang:error(badarg) end, - urldecode(Rest, <>, OnError); -urldecode(<<$+, Rest/binary>>, Acc, OnError) -> - urldecode(Rest, <>, OnError); -urldecode(<>, Acc, OnError) -> - urldecode(Rest, <>, OnError); -urldecode(<<>>, Acc, _OnError) -> - Acc. - --spec unhex(byte()) -> byte() | error. -unhex(C) when C >= $0, C =< $9 -> C - $0; -unhex(C) when C >= $A, C =< $F -> C - $A + 10; -unhex(C) when C >= $a, C =< $f -> C - $a + 10; -unhex(_) -> error. - - -%% @doc URL encode a string binary. -%% @equiv urlencode(Bin, []) --spec urlencode(binary()) -> binary(). -urlencode(Bin) -> - urlencode(Bin, []). - -%% @doc URL encode a string binary. -%% The `noplus' option disables the default behaviour of quoting space -%% characters, `\s', as `+'. The `upper' option overrides the default behaviour -%% of writing hex numbers using lowecase letters to using uppercase letters -%% instead. --spec urlencode(binary(), [noplus|upper]) -> binary(). -urlencode(Bin, Opts) -> - Plus = not proplists:get_value(noplus, Opts, false), - Upper = proplists:get_value(upper, Opts, false), - urlencode(Bin, <<>>, Plus, Upper). 
- -urlencode(<>, Acc, P=Plus, U=Upper) -> - if C >= $0, C =< $9 -> urlencode(Rest, <>, P, U); - C >= $A, C =< $Z -> urlencode(Rest, <>, P, U); - C >= $a, C =< $z -> urlencode(Rest, <>, P, U); - C =:= $.; C =:= $-; C =:= $~; C =:= $_ -> - urlencode(Rest, <>, P, U); - C =:= $ , Plus -> - urlencode(Rest, <>, P, U); - true -> - H = C band 16#F0 bsr 4, L = C band 16#0F, - H1 = if Upper -> tohexu(H); true -> tohexl(H) end, - L1 = if Upper -> tohexu(L); true -> tohexl(L) end, - urlencode(Rest, <>, P, U) - end; -urlencode(<<>>, Acc, _Plus, _Upper) -> - Acc. +authorization_basic_userid(<<>>, _Fun, _Acc) -> + {error, badarg}; +authorization_basic_userid(<>, _Fun, Acc) + when C < 32; C =:= 127; (C =:=$: andalso Acc =:= <<>>) -> + {error, badarg}; +authorization_basic_userid(<<$:, Rest/binary>>, Fun, Acc) -> + Fun(Rest, Acc); +authorization_basic_userid(<>, Fun, Acc) -> + authorization_basic_userid(Rest, Fun, <>). --spec tohexu(byte()) -> byte(). -tohexu(C) when C < 10 -> $0 + C; -tohexu(C) when C < 17 -> $A + C - 10. +-spec authorization_basic_password(binary(), fun()) -> any(). +authorization_basic_password(Data, Fun) -> + authorization_basic_password(Data, Fun, <<>>). --spec tohexl(byte()) -> byte(). -tohexl(C) when C < 10 -> $0 + C; -tohexl(C) when C < 17 -> $a + C - 10. +authorization_basic_password(<>, _Fun, _Acc) + when C < 32; C=:= 127 -> + {error, badarg}; +authorization_basic_password(<<>>, Fun, Acc) -> + Fun(Acc); +authorization_basic_password(<>, Fun, Acc) -> + authorization_basic_password(Rest, Fun, <>). + +-spec range(binary()) -> {Unit, [Range]} | {error, badarg} when + Unit :: binary(), + Range :: {non_neg_integer(), non_neg_integer() | infinity} | neg_integer(). +range(Data) -> + token_ci(Data, fun range/2). + +range(Data, Token) -> + whitespace(Data, + fun(<<"=", Rest/binary>>) -> + case list(Rest, fun range_beginning/2) of + {error, badarg} -> + {error, badarg}; + Ranges -> + {Token, Ranges} + end; + (_) -> + {error, badarg} + end). 
+range_beginning(Data, Fun) -> + range_digits(Data, suffix, + fun(D, RangeBeginning) -> + range_ending(D, Fun, RangeBeginning) + end). + +range_ending(Data, Fun, RangeBeginning) -> + whitespace(Data, + fun(<<"-", R/binary>>) -> + case RangeBeginning of + suffix -> + range_digits(R, fun(D, RangeEnding) -> Fun(D, -RangeEnding) end); + _ -> + range_digits(R, infinity, + fun(D, RangeEnding) -> + Fun(D, {RangeBeginning, RangeEnding}) + end) + end; + (_) -> + {error, badarg} + end). + +-spec range_digits(binary(), fun()) -> any(). +range_digits(Data, Fun) -> + whitespace(Data, + fun(D) -> + digits(D, Fun) + end). + +-spec range_digits(binary(), any(), fun()) -> any(). +range_digits(Data, Default, Fun) -> + whitespace(Data, + fun(<< C, Rest/binary >>) when C >= $0, C =< $9 -> + digits(Rest, Fun, C - $0); + (_) -> + Fun(Data, Default) + end). + +-spec parameterized_tokens(binary()) -> any(). +parameterized_tokens(Data) -> + nonempty_list(Data, + fun (D, Fun) -> + token(D, + fun (_Rest, <<>>) -> {error, badarg}; + (Rest, Token) -> + parameterized_tokens_params(Rest, + fun (Rest2, Params) -> + Fun(Rest2, {Token, Params}) + end, []) + end) + end). + +-spec parameterized_tokens_params(binary(), fun(), [binary() | {binary(), binary()}]) -> any(). +parameterized_tokens_params(Data, Fun, Acc) -> + whitespace(Data, + fun (<< $;, Rest/binary >>) -> + parameterized_tokens_param(Rest, + fun (Rest2, Param) -> + parameterized_tokens_params(Rest2, Fun, [Param|Acc]) + end); + (Rest) -> + Fun(Rest, lists:reverse(Acc)) + end). + +-spec parameterized_tokens_param(binary(), fun()) -> any(). +parameterized_tokens_param(Data, Fun) -> + whitespace(Data, + fun (Rest) -> + token(Rest, + fun (_Rest2, <<>>) -> {error, badarg}; + (<< $=, Rest2/binary >>, Attr) -> + word(Rest2, + fun (Rest3, Value) -> + Fun(Rest3, {Attr, Value}) + end); + (Rest2, Attr) -> + Fun(Rest2, Attr) + end) + end). + +%% Decoding. + +%% @todo Move this to cowlib too I suppose. 
:-) +-spec ce_identity(binary()) -> {ok, binary()}. +ce_identity(Data) -> + {ok, Data}. %% Tests. -ifdef(TEST). - nonempty_charset_list_test_() -> - %% {Value, Result} Tests = [ {<<>>, {error, badarg}}, {<<"iso-8859-5, unicode-1-1;q=0.8">>, [ {<<"iso-8859-5">>, 1000}, {<<"unicode-1-1">>, 800} + ]}, + %% Some user agents send this invalid value for the Accept-Charset header + {<<"ISO-8859-1;utf-8;q=0.7,*;q=0.7">>, [ + {<<"iso-8859-1">>, 1000}, + {<<"utf-8">>, 700}, + {<<"*">>, 700} ]} ], [{V, fun() -> R = nonempty_list(V, fun conneg/2) end} || {V, R} <- Tests]. nonempty_language_range_list_test_() -> - %% {Value, Result} Tests = [ {<<"da, en-gb;q=0.8, en;q=0.7">>, [ {<<"da">>, 1000}, {<<"en-gb">>, 800}, {<<"en">>, 700} ]}, - {<<"en, en-US, en-cockney, i-cherokee, x-pig-latin">>, [ + {<<"en, en-US, en-cockney, i-cherokee, x-pig-latin, es-419">>, [ {<<"en">>, 1000}, {<<"en-us">>, 1000}, {<<"en-cockney">>, 1000}, {<<"i-cherokee">>, 1000}, - {<<"x-pig-latin">>, 1000} + {<<"x-pig-latin">>, 1000}, + {<<"es-419">>, 1000} ]} ], [{V, fun() -> R = nonempty_list(V, fun language_range/2) end} || {V, R} <- Tests]. nonempty_token_list_test_() -> - %% {Value, Result} Tests = [ {<<>>, {error, badarg}}, {<<" ">>, {error, badarg}}, @@ -832,7 +912,6 @@ nonempty_token_list_test_() -> [{V, fun() -> R = nonempty_list(V, fun token/2) end} || {V, R} <- Tests]. 
media_range_list_test_() -> - %% {Tokens, Result} Tests = [ {<<"audio/*; q=0.2, audio/basic">>, [ {{<<"audio">>, <<"*">>, []}, 200, []}, @@ -865,12 +944,18 @@ media_range_list_test_() -> [{<<"level">>, <<"1">>}, {<<"quoted">>, <<"hi hi hi">>}]}, 123, [<<"standalone">>, {<<"complex">>, <<"gits">>}]}, {{<<"text">>, <<"plain">>, []}, 1000, []} + ]}, + {<<"text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2">>, [ + {{<<"text">>, <<"html">>, []}, 1000, []}, + {{<<"image">>, <<"gif">>, []}, 1000, []}, + {{<<"image">>, <<"jpeg">>, []}, 1000, []}, + {{<<"*">>, <<"*">>, []}, 200, []}, + {{<<"*">>, <<"*">>, []}, 200, []} ]} ], [{V, fun() -> R = list(V, fun media_range/2) end} || {V, R} <- Tests]. entity_tag_match_test_() -> - %% {Tokens, Result} Tests = [ {<<"\"xyzzy\"">>, [{strong, <<"xyzzy">>}]}, {<<"\"xyzzy\", W/\"r2d2xxxx\", \"c3piozzzz\"">>, @@ -882,7 +967,6 @@ entity_tag_match_test_() -> [{V, fun() -> R = entity_tag_match(V) end} || {V, R} <- Tests]. http_date_test_() -> - %% {Tokens, Result} Tests = [ {<<"Sun, 06 Nov 1994 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}}, {<<"Sunday, 06-Nov-94 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}}, @@ -891,38 +975,24 @@ http_date_test_() -> [{V, fun() -> R = http_date(V) end} || {V, R} <- Tests]. rfc1123_date_test_() -> - %% {Tokens, Result} Tests = [ {<<"Sun, 06 Nov 1994 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}} ], [{V, fun() -> R = rfc1123_date(V) end} || {V, R} <- Tests]. rfc850_date_test_() -> - %% {Tokens, Result} Tests = [ {<<"Sunday, 06-Nov-94 08:49:37 GMT">>, {{1994, 11, 6}, {8, 49, 37}}} ], [{V, fun() -> R = rfc850_date(V) end} || {V, R} <- Tests]. asctime_date_test_() -> - %% {Tokens, Result} Tests = [ {<<"Sun Nov 6 08:49:37 1994">>, {{1994, 11, 6}, {8, 49, 37}}} ], [{V, fun() -> R = asctime_date(V) end} || {V, R} <- Tests]. 
-connection_to_atom_test_() -> - %% {Tokens, Result} - Tests = [ - {[<<"close">>], close}, - {[<<"keep-alive">>], keepalive}, - {[<<"keep-alive">>, <<"upgrade">>], keepalive} - ], - [{lists:flatten(io_lib:format("~p", [T])), - fun() -> R = connection_to_atom(T) end} || {T, R} <- Tests]. - content_type_test_() -> - %% {ContentType, Result} Tests = [ {<<"text/plain; charset=iso-8859-4">>, {<<"text">>, <<"plain">>, [{<<"charset">>, <<"iso-8859-4">>}]}}, @@ -938,8 +1008,17 @@ content_type_test_() -> ], [{V, fun () -> R = content_type(V) end} || {V, R} <- Tests]. +parameterized_tokens_test_() -> + Tests = [ + {<<"foo">>, [{<<"foo">>, []}]}, + {<<"bar; baz=2">>, [{<<"bar">>, [{<<"baz">>, <<"2">>}]}]}, + {<<"bar; baz=2;bat">>, [{<<"bar">>, [{<<"baz">>, <<"2">>}, <<"bat">>]}]}, + {<<"bar; baz=2;bat=\"z=1,2;3\"">>, [{<<"bar">>, [{<<"baz">>, <<"2">>}, {<<"bat">>, <<"z=1,2;3">>}]}]}, + {<<"foo, bar; baz=2">>, [{<<"foo">>, []}, {<<"bar">>, [{<<"baz">>, <<"2">>}]}]} + ], + [{V, fun () -> R = parameterized_tokens(V) end} || {V, R} <- Tests]. + digits_test_() -> - %% {Digits, Result} Tests = [ {<<"42 ">>, 42}, {<<"69\t">>, 69}, @@ -947,28 +1026,43 @@ digits_test_() -> ], [{V, fun() -> R = digits(V) end} || {V, R} <- Tests]. -urldecode_test_() -> - U = fun urldecode/2, - [?_assertEqual(<<" ">>, U(<<"%20">>, crash)), - ?_assertEqual(<<" ">>, U(<<"+">>, crash)), - ?_assertEqual(<<0>>, U(<<"%00">>, crash)), - ?_assertEqual(<<255>>, U(<<"%fF">>, crash)), - ?_assertEqual(<<"123">>, U(<<"123">>, crash)), - ?_assertEqual(<<"%i5">>, U(<<"%i5">>, skip)), - ?_assertEqual(<<"%5">>, U(<<"%5">>, skip)), - ?_assertError(badarg, U(<<"%i5">>, crash)), - ?_assertError(badarg, U(<<"%5">>, crash)) - ]. 
- -urlencode_test_() -> - U = fun urlencode/2, - [?_assertEqual(<<"%ff%00">>, U(<<255,0>>, [])), - ?_assertEqual(<<"%FF%00">>, U(<<255,0>>, [upper])), - ?_assertEqual(<<"+">>, U(<<" ">>, [])), - ?_assertEqual(<<"%20">>, U(<<" ">>, [noplus])), - ?_assertEqual(<<"aBc">>, U(<<"aBc">>, [])), - ?_assertEqual(<<".-~_">>, U(<<".-~_">>, [])), - ?_assertEqual(<<"%ff+">>, urlencode(<<255, " ">>)) - ]. +http_authorization_test_() -> + Tests = [ + {<<"basic">>, <<"QWxsYWRpbjpvcGVuIHNlc2FtZQ==">>, + {<<"basic">>, {<<"Alladin">>, <<"open sesame">>}}}, + {<<"basic">>, <<"dXNlcm5hbWU6">>, + {<<"basic">>, {<<"username">>, <<>>}}}, + {<<"basic">>, <<"dXNlcm5hbWUK">>, + {error, badarg}}, + {<<"basic">>, <<"_[]@#$%^&*()-AA==">>, + {error, badarg}}, + {<<"basic">>, <<"dXNlcjpwYXNzCA==">>, + {error, badarg}}, + {<<"bearer">>, <<" some_secret_key">>, + {<<"bearer">>,<<"some_secret_key">>}} + ], + [{V, fun() -> R = authorization(V,T) end} || {T, V, R} <- Tests]. +http_range_test_() -> + Tests = [ + {<<"bytes=1-20">>, + {<<"bytes">>, [{1, 20}]}}, + {<<"bytes=-100">>, + {<<"bytes">>, [-100]}}, + {<<"bytes=1-">>, + {<<"bytes">>, [{1, infinity}]}}, + {<<"bytes=1-20,30-40,50-">>, + {<<"bytes">>, [{1, 20}, {30, 40}, {50, infinity}]}}, + {<<"bytes = 1 - 20 , 50 - , - 300 ">>, + {<<"bytes">>, [{1, 20}, {50, infinity}, -300]}}, + {<<"bytes=1-20,-500,30-40">>, + {<<"bytes">>, [{1, 20}, -500, {30, 40}]}}, + {<<"test=1-20,-500,30-40">>, + {<<"test">>, [{1, 20}, -500, {30, 40}]}}, + {<<"bytes=-">>, + {error, badarg}}, + {<<"bytes=-30,-">>, + {error, badarg}} + ], + [fun() -> R = range(V) end ||{V, R} <- Tests]. -endif. 
diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_http_handler.erl b/rabbitmq-server/deps/cowboy/src/cowboy_http_handler.erl new file mode 100644 index 0000000..14c7987 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_http_handler.erl @@ -0,0 +1,37 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_http_handler). + +-type opts() :: any(). +-type state() :: any(). +-type terminate_reason() :: {normal, shutdown} + | {normal, timeout} %% Only occurs in loop handlers. + | {error, closed} %% Only occurs in loop handlers. + | {error, overflow} %% Only occurs in loop handlers. + | {error, atom()}. + +-callback init({atom(), http}, Req, opts()) + -> {ok, Req, state()} + | {loop, Req, state()} + | {loop, Req, state(), hibernate} + | {loop, Req, state(), timeout()} + | {loop, Req, state(), timeout(), hibernate} + | {shutdown, Req, state()} + | {upgrade, protocol, module()} + | {upgrade, protocol, module(), Req, opts()} + when Req::cowboy_req:req(). +-callback handle(Req, State) -> {ok, Req, State} + when Req::cowboy_req:req(), State::state(). +-callback terminate(terminate_reason(), cowboy_req:req(), state()) -> ok. 
diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_loop_handler.erl b/rabbitmq-server/deps/cowboy/src/cowboy_loop_handler.erl new file mode 100644 index 0000000..edef77f --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_loop_handler.erl @@ -0,0 +1,40 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_loop_handler). + +-type opts() :: any(). +-type state() :: any(). +-type terminate_reason() :: {normal, shutdown} + | {normal, timeout} + | {error, closed} + | {error, overflow} + | {error, atom()}. + +-callback init({atom(), http}, Req, opts()) + -> {ok, Req, state()} + | {loop, Req, state()} + | {loop, Req, state(), hibernate} + | {loop, Req, state(), timeout()} + | {loop, Req, state(), timeout(), hibernate} + | {shutdown, Req, state()} + | {upgrade, protocol, module()} + | {upgrade, protocol, module(), Req, opts()} + when Req::cowboy_req:req(). +-callback info(any(), Req, State) + -> {ok, Req, State} + | {loop, Req, State} + | {loop, Req, State, hibernate} + when Req::cowboy_req:req(), State::state(). +-callback terminate(terminate_reason(), cowboy_req:req(), state()) -> ok. 
diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_middleware.erl b/rabbitmq-server/deps/cowboy/src/cowboy_middleware.erl new file mode 100644 index 0000000..fa0f5bc --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_middleware.erl @@ -0,0 +1,25 @@ +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_middleware). + +-type env() :: [{atom(), any()}]. +-export_type([env/0]). + +-callback execute(Req, Env) + -> {ok, Req, Env} + | {suspend, module(), atom(), [any()]} + | {halt, Req} + | {error, cowboy:http_status(), Req} + when Req::cowboy_req:req(), Env::env(). diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_protocol.erl b/rabbitmq-server/deps/cowboy/src/cowboy_protocol.erl new file mode 100644 index 0000000..1026d28 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_protocol.erl @@ -0,0 +1,510 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% Copyright (c) 2011, Anthony Ramine +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. 
+%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_protocol). + +%% API. +-export([start_link/4]). + +%% Internal. +-export([init/4]). +-export([parse_request/3]). +-export([resume/6]). + +-type opts() :: [{compress, boolean()} + | {env, cowboy_middleware:env()} + | {max_empty_lines, non_neg_integer()} + | {max_header_name_length, non_neg_integer()} + | {max_header_value_length, non_neg_integer()} + | {max_headers, non_neg_integer()} + | {max_keepalive, non_neg_integer()} + | {max_request_line_length, non_neg_integer()} + | {middlewares, [module()]} + | {onrequest, cowboy:onrequest_fun()} + | {onresponse, cowboy:onresponse_fun()} + | {timeout, timeout()}]. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module(), + middlewares :: [module()], + compress :: boolean(), + env :: cowboy_middleware:env(), + onrequest :: undefined | cowboy:onrequest_fun(), + onresponse = undefined :: undefined | cowboy:onresponse_fun(), + max_empty_lines :: non_neg_integer(), + req_keepalive = 1 :: non_neg_integer(), + max_keepalive :: non_neg_integer(), + max_request_line_length :: non_neg_integer(), + max_header_name_length :: non_neg_integer(), + max_header_value_length :: non_neg_integer(), + max_headers :: non_neg_integer(), + timeout :: timeout(), + until :: non_neg_integer() | infinity +}). + +-include_lib("cowlib/include/cow_inline.hrl"). + +%% API. + +-spec start_link(ranch:ref(), inet:socket(), module(), opts()) -> {ok, pid()}. 
+start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +%% Internal. + +%% Faster alternative to proplists:get_value/3. +get_value(Key, Opts, Default) -> + case lists:keyfind(Key, 1, Opts) of + {_, Value} -> Value; + _ -> Default + end. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, Opts) -> + Compress = get_value(compress, Opts, false), + MaxEmptyLines = get_value(max_empty_lines, Opts, 5), + MaxHeaderNameLength = get_value(max_header_name_length, Opts, 64), + MaxHeaderValueLength = get_value(max_header_value_length, Opts, 4096), + MaxHeaders = get_value(max_headers, Opts, 100), + MaxKeepalive = get_value(max_keepalive, Opts, 100), + MaxRequestLineLength = get_value(max_request_line_length, Opts, 4096), + Middlewares = get_value(middlewares, Opts, [cowboy_router, cowboy_handler]), + Env = [{listener, Ref}|get_value(env, Opts, [])], + OnRequest = get_value(onrequest, Opts, undefined), + OnResponse = get_value(onresponse, Opts, undefined), + Timeout = get_value(timeout, Opts, 5000), + ok = ranch:accept_ack(Ref), + wait_request(<<>>, #state{socket=Socket, transport=Transport, + middlewares=Middlewares, compress=Compress, env=Env, + max_empty_lines=MaxEmptyLines, max_keepalive=MaxKeepalive, + max_request_line_length=MaxRequestLineLength, + max_header_name_length=MaxHeaderNameLength, + max_header_value_length=MaxHeaderValueLength, max_headers=MaxHeaders, + onrequest=OnRequest, onresponse=OnResponse, + timeout=Timeout, until=until(Timeout)}, 0). + +-spec until(timeout()) -> non_neg_integer() | infinity. +until(infinity) -> + infinity; +until(Timeout) -> + {Me, S, Mi} = os:timestamp(), + Me * 1000000000 + S * 1000 + Mi div 1000 + Timeout. + +%% Request parsing. +%% +%% The next set of functions is the request parsing code. All of it +%% runs using a single binary match context. 
This optimization ends +%% right after the header parsing is finished and the code becomes +%% more interesting past that point. + +-spec recv(inet:socket(), module(), non_neg_integer() | infinity) + -> {ok, binary()} | {error, closed | timeout | atom()}. +recv(Socket, Transport, infinity) -> + Transport:recv(Socket, 0, infinity); +recv(Socket, Transport, Until) -> + {Me, S, Mi} = os:timestamp(), + Now = Me * 1000000000 + S * 1000 + Mi div 1000, + Timeout = Until - Now, + if Timeout < 0 -> + {error, timeout}; + true -> + Transport:recv(Socket, 0, Timeout) + end. + +-spec wait_request(binary(), #state{}, non_neg_integer()) -> ok. +wait_request(Buffer, State=#state{socket=Socket, transport=Transport, + until=Until}, ReqEmpty) -> + case recv(Socket, Transport, Until) of + {ok, Data} -> + parse_request(<< Buffer/binary, Data/binary >>, State, ReqEmpty); + {error, _} -> + terminate(State) + end. + +-spec parse_request(binary(), #state{}, non_neg_integer()) -> ok. +%% Empty lines must be using \r\n. +parse_request(<< $\n, _/binary >>, State, _) -> + error_terminate(400, State); +%% We limit the length of the Request-line to MaxLength to avoid endlessly +%% reading from the socket and eventually crashing. +parse_request(Buffer, State=#state{max_request_line_length=MaxLength, + max_empty_lines=MaxEmpty}, ReqEmpty) -> + case match_eol(Buffer, 0) of + nomatch when byte_size(Buffer) > MaxLength -> + error_terminate(414, State); + nomatch -> + wait_request(Buffer, State, ReqEmpty); + 1 when ReqEmpty =:= MaxEmpty -> + error_terminate(400, State); + 1 -> + << _:16, Rest/binary >> = Buffer, + parse_request(Rest, State, ReqEmpty + 1); + _ -> + parse_method(Buffer, State, <<>>) + end. + +match_eol(<< $\n, _/bits >>, N) -> + N; +match_eol(<< _, Rest/bits >>, N) -> + match_eol(Rest, N + 1); +match_eol(_, _) -> + nomatch. 
+ +parse_method(<< C, Rest/bits >>, State, SoFar) -> + case C of + $\r -> error_terminate(400, State); + $\s -> parse_uri(Rest, State, SoFar); + _ -> parse_method(Rest, State, << SoFar/binary, C >>) + end. + +parse_uri(<< $\r, _/bits >>, State, _) -> + error_terminate(400, State); +parse_uri(<< "* ", Rest/bits >>, State, Method) -> + parse_version(Rest, State, Method, <<"*">>, <<>>); +parse_uri(<< "http://", Rest/bits >>, State, Method) -> + parse_uri_skip_host(Rest, State, Method); +parse_uri(<< "https://", Rest/bits >>, State, Method) -> + parse_uri_skip_host(Rest, State, Method); +parse_uri(<< "HTTP://", Rest/bits >>, State, Method) -> + parse_uri_skip_host(Rest, State, Method); +parse_uri(<< "HTTPS://", Rest/bits >>, State, Method) -> + parse_uri_skip_host(Rest, State, Method); +parse_uri(Buffer, State, Method) -> + parse_uri_path(Buffer, State, Method, <<>>). + +parse_uri_skip_host(<< C, Rest/bits >>, State, Method) -> + case C of + $\r -> error_terminate(400, State); + $/ -> parse_uri_path(Rest, State, Method, <<"/">>); + $\s -> parse_version(Rest, State, Method, <<"/">>, <<>>); + $? -> parse_uri_query(Rest, State, Method, <<"/">>, <<>>); + $# -> skip_uri_fragment(Rest, State, Method, <<"/">>, <<>>); + _ -> parse_uri_skip_host(Rest, State, Method) + end. + +parse_uri_path(<< C, Rest/bits >>, State, Method, SoFar) -> + case C of + $\r -> error_terminate(400, State); + $\s -> parse_version(Rest, State, Method, SoFar, <<>>); + $? -> parse_uri_query(Rest, State, Method, SoFar, <<>>); + $# -> skip_uri_fragment(Rest, State, Method, SoFar, <<>>); + _ -> parse_uri_path(Rest, State, Method, << SoFar/binary, C >>) + end. + +parse_uri_query(<< C, Rest/bits >>, S, M, P, SoFar) -> + case C of + $\r -> error_terminate(400, S); + $\s -> parse_version(Rest, S, M, P, SoFar); + $# -> skip_uri_fragment(Rest, S, M, P, SoFar); + _ -> parse_uri_query(Rest, S, M, P, << SoFar/binary, C >>) + end. 
+ +skip_uri_fragment(<< C, Rest/bits >>, S, M, P, Q) -> + case C of + $\r -> error_terminate(400, S); + $\s -> parse_version(Rest, S, M, P, Q); + _ -> skip_uri_fragment(Rest, S, M, P, Q) + end. + +parse_version(<< "HTTP/1.1\r\n", Rest/bits >>, S, M, P, Q) -> + parse_header(Rest, S, M, P, Q, 'HTTP/1.1', []); +parse_version(<< "HTTP/1.0\r\n", Rest/bits >>, S, M, P, Q) -> + parse_header(Rest, S, M, P, Q, 'HTTP/1.0', []); +parse_version(_, State, _, _, _) -> + error_terminate(505, State). + +%% Stop receiving data if we have more than allowed number of headers. +wait_header(_, State=#state{max_headers=MaxHeaders}, _, _, _, _, Headers) + when length(Headers) >= MaxHeaders -> + error_terminate(400, State); +wait_header(Buffer, State=#state{socket=Socket, transport=Transport, + until=Until}, M, P, Q, V, H) -> + case recv(Socket, Transport, Until) of + {ok, Data} -> + parse_header(<< Buffer/binary, Data/binary >>, + State, M, P, Q, V, H); + {error, timeout} -> + error_terminate(408, State); + {error, _} -> + terminate(State) + end. + +parse_header(<< $\r, $\n, Rest/bits >>, S, M, P, Q, V, Headers) -> + request(Rest, S, M, P, Q, V, lists:reverse(Headers)); +parse_header(Buffer, State=#state{max_header_name_length=MaxLength}, + M, P, Q, V, H) -> + case match_colon(Buffer, 0) of + nomatch when byte_size(Buffer) > MaxLength -> + error_terminate(400, State); + nomatch -> + wait_header(Buffer, State, M, P, Q, V, H); + _ -> + parse_hd_name(Buffer, State, M, P, Q, V, H, <<>>) + end. + +match_colon(<< $:, _/bits >>, N) -> + N; +match_colon(<< _, Rest/bits >>, N) -> + match_colon(Rest, N + 1); +match_colon(_, _) -> + nomatch. + +parse_hd_name(<< C, Rest/bits >>, S, M, P, Q, V, H, SoFar) -> + case C of + $: -> parse_hd_before_value(Rest, S, M, P, Q, V, H, SoFar); + $\s -> parse_hd_name_ws(Rest, S, M, P, Q, V, H, SoFar); + $\t -> parse_hd_name_ws(Rest, S, M, P, Q, V, H, SoFar); + ?INLINE_LOWERCASE(parse_hd_name, Rest, S, M, P, Q, V, H, SoFar) + end. 
+ +parse_hd_name_ws(<< C, Rest/bits >>, S, M, P, Q, V, H, Name) -> + case C of + $\s -> parse_hd_name_ws(Rest, S, M, P, Q, V, H, Name); + $\t -> parse_hd_name_ws(Rest, S, M, P, Q, V, H, Name); + $: -> parse_hd_before_value(Rest, S, M, P, Q, V, H, Name) + end. + +wait_hd_before_value(Buffer, State=#state{ + socket=Socket, transport=Transport, until=Until}, + M, P, Q, V, H, N) -> + case recv(Socket, Transport, Until) of + {ok, Data} -> + parse_hd_before_value(<< Buffer/binary, Data/binary >>, + State, M, P, Q, V, H, N); + {error, timeout} -> + error_terminate(408, State); + {error, _} -> + terminate(State) + end. + +parse_hd_before_value(<< $\s, Rest/bits >>, S, M, P, Q, V, H, N) -> + parse_hd_before_value(Rest, S, M, P, Q, V, H, N); +parse_hd_before_value(<< $\t, Rest/bits >>, S, M, P, Q, V, H, N) -> + parse_hd_before_value(Rest, S, M, P, Q, V, H, N); +parse_hd_before_value(Buffer, State=#state{ + max_header_value_length=MaxLength}, M, P, Q, V, H, N) -> + case match_eol(Buffer, 0) of + nomatch when byte_size(Buffer) > MaxLength -> + error_terminate(400, State); + nomatch -> + wait_hd_before_value(Buffer, State, M, P, Q, V, H, N); + _ -> + parse_hd_value(Buffer, State, M, P, Q, V, H, N, <<>>) + end. + +%% We completely ignore the first argument which is always +%% the empty binary. We keep it there because we don't want +%% to change the other arguments' position and trigger costy +%% operations for no reasons. +wait_hd_value(_, State=#state{ + socket=Socket, transport=Transport, until=Until}, + M, P, Q, V, H, N, SoFar) -> + case recv(Socket, Transport, Until) of + {ok, Data} -> + parse_hd_value(Data, State, M, P, Q, V, H, N, SoFar); + {error, timeout} -> + error_terminate(408, State); + {error, _} -> + terminate(State) + end. + +%% Pushing back as much as we could the retrieval of new data +%% to check for multilines allows us to avoid a few tests in +%% the critical path, but forces us to have a special function. 
+wait_hd_value_nl(_, State=#state{ + socket=Socket, transport=Transport, until=Until}, + M, P, Q, V, Headers, Name, SoFar) -> + case recv(Socket, Transport, Until) of + {ok, << C, Data/bits >>} when C =:= $\s; C =:= $\t -> + parse_hd_value(Data, State, M, P, Q, V, Headers, Name, SoFar); + {ok, Data} -> + parse_header(Data, State, M, P, Q, V, [{Name, SoFar}|Headers]); + {error, timeout} -> + error_terminate(408, State); + {error, _} -> + terminate(State) + end. + +parse_hd_value(<< $\r, Rest/bits >>, S, M, P, Q, V, Headers, Name, SoFar) -> + case Rest of + << $\n >> -> + wait_hd_value_nl(<<>>, S, M, P, Q, V, Headers, Name, SoFar); + << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t -> + parse_hd_value(Rest2, S, M, P, Q, V, Headers, Name, + << SoFar/binary, C >>); + << $\n, Rest2/bits >> -> + parse_header(Rest2, S, M, P, Q, V, [{Name, SoFar}|Headers]) + end; +parse_hd_value(<< C, Rest/bits >>, S, M, P, Q, V, H, N, SoFar) -> + parse_hd_value(Rest, S, M, P, Q, V, H, N, << SoFar/binary, C >>); +parse_hd_value(<<>>, State=#state{max_header_value_length=MaxLength}, + _, _, _, _, _, _, SoFar) when byte_size(SoFar) > MaxLength -> + error_terminate(400, State); +parse_hd_value(<<>>, S, M, P, Q, V, H, N, SoFar) -> + wait_hd_value(<<>>, S, M, P, Q, V, H, N, SoFar). + +request(B, State=#state{transport=Transport}, M, P, Q, Version, Headers) -> + case lists:keyfind(<<"host">>, 1, Headers) of + false when Version =:= 'HTTP/1.1' -> + error_terminate(400, State); + false -> + request(B, State, M, P, Q, Version, Headers, + <<>>, default_port(Transport:name())); + {_, RawHost} -> + try parse_host(RawHost, false, <<>>) of + {Host, undefined} -> + request(B, State, M, P, Q, Version, Headers, + Host, default_port(Transport:name())); + {Host, Port} -> + request(B, State, M, P, Q, Version, Headers, + Host, Port) + catch _:_ -> + error_terminate(400, State) + end + end. + +-spec default_port(atom()) -> 80 | 443. +default_port(ssl) -> 443; +default_port(_) -> 80. 
+ +%% Same code as cow_http:parse_fullhost/1, but inline because we +%% really want this to go fast. +parse_host(<< $[, Rest/bits >>, false, <<>>) -> + parse_host(Rest, true, << $[ >>); +parse_host(<<>>, false, Acc) -> + {Acc, undefined}; +parse_host(<< $:, Rest/bits >>, false, Acc) -> + {Acc, list_to_integer(binary_to_list(Rest))}; +parse_host(<< $], Rest/bits >>, true, Acc) -> + parse_host(Rest, false, << Acc/binary, $] >>); +parse_host(<< C, Rest/bits >>, E, Acc) -> + case C of + ?INLINE_LOWERCASE(parse_host, Rest, E, Acc) + end. + +%% End of request parsing. +%% +%% We create the Req object and start handling the request. + +request(Buffer, State=#state{socket=Socket, transport=Transport, + req_keepalive=ReqKeepalive, max_keepalive=MaxKeepalive, + compress=Compress, onresponse=OnResponse}, + Method, Path, Query, Version, Headers, Host, Port) -> + case Transport:peername(Socket) of + {ok, Peer} -> + Req = cowboy_req:new(Socket, Transport, Peer, Method, Path, + Query, Version, Headers, Host, Port, Buffer, + ReqKeepalive < MaxKeepalive, Compress, OnResponse), + onrequest(Req, State); + {error, _} -> + %% Couldn't read the peer address; connection is gone. + terminate(State) + end. + +%% Call the global onrequest callback. The callback can send a reply, +%% in which case we consider the request handled and move on to the next +%% one. Note that since we haven't dispatched yet, we don't know the +%% handler, host_info, path_info or bindings yet. +-spec onrequest(cowboy_req:req(), #state{}) -> ok. +onrequest(Req, State=#state{onrequest=undefined}) -> + execute(Req, State); +onrequest(Req, State=#state{onrequest=OnRequest}) -> + Req2 = OnRequest(Req), + case cowboy_req:get(resp_state, Req2) of + waiting -> execute(Req2, State); + _ -> next_request(Req2, State, ok) + end. + +-spec execute(cowboy_req:req(), #state{}) -> ok. +execute(Req, State=#state{middlewares=Middlewares, env=Env}) -> + execute(Req, State, Env, Middlewares). 
+ +-spec execute(cowboy_req:req(), #state{}, cowboy_middleware:env(), [module()]) + -> ok. +execute(Req, State, Env, []) -> + next_request(Req, State, get_value(result, Env, ok)); +execute(Req, State, Env, [Middleware|Tail]) -> + case Middleware:execute(Req, Env) of + {ok, Req2, Env2} -> + execute(Req2, State, Env2, Tail); + {suspend, Module, Function, Args} -> + erlang:hibernate(?MODULE, resume, + [State, Env, Tail, Module, Function, Args]); + {halt, Req2} -> + next_request(Req2, State, ok); + {error, Code, Req2} -> + error_terminate(Code, Req2, State) + end. + +-spec resume(#state{}, cowboy_middleware:env(), [module()], + module(), module(), [any()]) -> ok. +resume(State, Env, Tail, Module, Function, Args) -> + case apply(Module, Function, Args) of + {ok, Req2, Env2} -> + execute(Req2, State, Env2, Tail); + {suspend, Module2, Function2, Args2} -> + erlang:hibernate(?MODULE, resume, + [State, Env, Tail, Module2, Function2, Args2]); + {halt, Req2} -> + next_request(Req2, State, ok); + {error, Code, Req2} -> + error_terminate(Code, Req2, State) + end. + +-spec next_request(cowboy_req:req(), #state{}, any()) -> ok. +next_request(Req, State=#state{req_keepalive=Keepalive, timeout=Timeout}, + HandlerRes) -> + cowboy_req:ensure_response(Req, 204), + %% If we are going to close the connection, + %% we do not want to attempt to skip the body. + case cowboy_req:get(connection, Req) of + close -> + terminate(State); + _ -> + %% Skip the body if it is reasonably sized. Close otherwise. + Buffer = case cowboy_req:body(Req) of + {ok, _, Req2} -> cowboy_req:get(buffer, Req2); + _ -> close + end, + %% Flush the resp_sent message before moving on. + if HandlerRes =:= ok, Buffer =/= close -> + receive {cowboy_req, resp_sent} -> ok after 0 -> ok end, + ?MODULE:parse_request(Buffer, + State#state{req_keepalive=Keepalive + 1, + until=until(Timeout)}, 0); + true -> + terminate(State) + end + end. + +-spec error_terminate(cowboy:http_status(), #state{}) -> ok. 
+error_terminate(Status, State=#state{socket=Socket, transport=Transport, + compress=Compress, onresponse=OnResponse}) -> + error_terminate(Status, cowboy_req:new(Socket, Transport, + undefined, <<"GET">>, <<>>, <<>>, 'HTTP/1.1', [], <<>>, + undefined, <<>>, false, Compress, OnResponse), State). + +-spec error_terminate(cowboy:http_status(), cowboy_req:req(), #state{}) -> ok. +error_terminate(Status, Req, State) -> + _ = cowboy_req:reply(Status, Req), + terminate(State). + +-spec terminate(#state{}) -> ok. +terminate(#state{socket=Socket, transport=Transport}) -> + Transport:close(Socket), + ok. diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_req.erl b/rabbitmq-server/deps/cowboy/src/cowboy_req.erl new file mode 100644 index 0000000..fcc9744 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_req.erl @@ -0,0 +1,1385 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% Copyright (c) 2011, Anthony Ramine +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_req). + +%% Request API. +-export([new/14]). +-export([method/1]). +-export([version/1]). +-export([peer/1]). +-export([host/1]). +-export([host_info/1]). +-export([port/1]). +-export([path/1]). +-export([path_info/1]). +-export([qs/1]). +-export([qs_val/2]). +-export([qs_val/3]). +-export([qs_vals/1]). 
+-export([host_url/1]). +-export([url/1]). +-export([binding/2]). +-export([binding/3]). +-export([bindings/1]). +-export([header/2]). +-export([header/3]). +-export([headers/1]). +-export([parse_header/2]). +-export([parse_header/3]). +-export([cookie/2]). +-export([cookie/3]). +-export([cookies/1]). +-export([meta/2]). +-export([meta/3]). +-export([set_meta/3]). + +%% Request body API. +-export([has_body/1]). +-export([body_length/1]). +-export([body/1]). +-export([body/2]). +-export([body_qs/1]). +-export([body_qs/2]). + +%% Multipart API. +-export([part/1]). +-export([part/2]). +-export([part_body/1]). +-export([part_body/2]). + +%% Response API. +-export([set_resp_cookie/4]). +-export([set_resp_header/3]). +-export([set_resp_body/2]). +-export([set_resp_body_fun/2]). +-export([set_resp_body_fun/3]). +-export([has_resp_header/2]). +-export([has_resp_body/1]). +-export([delete_resp_header/2]). +-export([reply/2]). +-export([reply/3]). +-export([reply/4]). +-export([chunked_reply/2]). +-export([chunked_reply/3]). +-export([chunk/2]). +-export([upgrade_reply/3]). +-export([continue/1]). +-export([maybe_reply/2]). +-export([ensure_response/2]). + +%% Private setter/getter API. +-export([append_buffer/2]). +-export([get/2]). +-export([set/2]). +-export([set_bindings/4]). + +%% Misc API. +-export([compact/1]). +-export([lock/1]). +-export([to_list/1]). + +-type cookie_opts() :: cow_cookie:cookie_opts(). +-export_type([cookie_opts/0]). + +-type content_decode_fun() :: fun((binary()) + -> {ok, binary()} + | {error, atom()}). +-type transfer_decode_fun() :: fun((binary(), any()) + -> cow_http_te:decode_ret()). + +-type body_opts() :: [{continue, boolean()} + | {length, non_neg_integer()} + | {read_length, non_neg_integer()} + | {read_timeout, timeout()} + | {transfer_decode, transfer_decode_fun(), any()} + | {content_decode, content_decode_fun()}]. +-export_type([body_opts/0]). + +-type resp_body_fun() :: fun((any(), module()) -> ok). 
+-type send_chunk_fun() :: fun((iodata()) -> ok | {error, atom()}). +-type resp_chunked_fun() :: fun((send_chunk_fun()) -> ok). + +-record(http_req, { + %% Transport. + socket = undefined :: any(), + transport = undefined :: undefined | module(), + connection = keepalive :: keepalive | close, + + %% Request. + pid = undefined :: pid(), + method = <<"GET">> :: binary(), + version = 'HTTP/1.1' :: cowboy:http_version(), + peer = undefined :: undefined | {inet:ip_address(), inet:port_number()}, + host = undefined :: undefined | binary(), + host_info = undefined :: undefined | cowboy_router:tokens(), + port = undefined :: undefined | inet:port_number(), + path = undefined :: binary(), + path_info = undefined :: undefined | cowboy_router:tokens(), + qs = undefined :: binary(), + qs_vals = undefined :: undefined | list({binary(), binary() | true}), + bindings = undefined :: undefined | cowboy_router:bindings(), + headers = [] :: cowboy:http_headers(), + p_headers = [] :: [any()], + cookies = undefined :: undefined | [{binary(), binary()}], + meta = [] :: [{atom(), any()}], + + %% Request body. + body_state = waiting :: waiting | done | {stream, non_neg_integer(), + transfer_decode_fun(), any(), content_decode_fun()}, + buffer = <<>> :: binary(), + multipart = undefined :: undefined | {binary(), binary()}, + + %% Response. + resp_compress = false :: boolean(), + resp_state = waiting :: locked | waiting | waiting_stream + | chunks | stream | done, + resp_headers = [] :: cowboy:http_headers(), + resp_body = <<>> :: iodata() | resp_body_fun() + | {non_neg_integer(), resp_body_fun()} + | {chunked, resp_chunked_fun()}, + + %% Functions. + onresponse = undefined :: undefined | already_called + | cowboy:onresponse_fun() +}). + +-opaque req() :: #http_req{}. +-export_type([req/0]). + +%% Request API. 
+ +-spec new(any(), module(), + undefined | {inet:ip_address(), inet:port_number()}, + binary(), binary(), binary(), + cowboy:http_version(), cowboy:http_headers(), binary(), + inet:port_number() | undefined, binary(), boolean(), boolean(), + undefined | cowboy:onresponse_fun()) + -> req(). +new(Socket, Transport, Peer, Method, Path, Query, + Version, Headers, Host, Port, Buffer, CanKeepalive, + Compress, OnResponse) -> + Req = #http_req{socket=Socket, transport=Transport, pid=self(), peer=Peer, + method=Method, path=Path, qs=Query, version=Version, + headers=Headers, host=Host, port=Port, buffer=Buffer, + resp_compress=Compress, onresponse=OnResponse}, + case CanKeepalive of + false -> + Req#http_req{connection=close}; + true -> + case lists:keyfind(<<"connection">>, 1, Headers) of + false -> + case Version of + 'HTTP/1.1' -> Req; %% keepalive + 'HTTP/1.0' -> Req#http_req{connection=close} + end; + {_, ConnectionHeader} -> + Tokens = cow_http_hd:parse_connection(ConnectionHeader), + Connection = connection_to_atom(Tokens), + Req#http_req{connection=Connection, + p_headers=[{<<"connection">>, Tokens}]} + end + end. + +-spec method(Req) -> {binary(), Req} when Req::req(). +method(Req) -> + {Req#http_req.method, Req}. + +-spec version(Req) -> {cowboy:http_version(), Req} when Req::req(). +version(Req) -> + {Req#http_req.version, Req}. + +-spec peer(Req) + -> {{inet:ip_address(), inet:port_number()}, Req} + when Req::req(). +peer(Req) -> + {Req#http_req.peer, Req}. + +-spec host(Req) -> {binary(), Req} when Req::req(). +host(Req) -> + {Req#http_req.host, Req}. + +-spec host_info(Req) + -> {cowboy_router:tokens() | undefined, Req} when Req::req(). +host_info(Req) -> + {Req#http_req.host_info, Req}. + +-spec port(Req) -> {inet:port_number(), Req} when Req::req(). +port(Req) -> + {Req#http_req.port, Req}. + +-spec path(Req) -> {binary(), Req} when Req::req(). +path(Req) -> + {Req#http_req.path, Req}. 
+ +-spec path_info(Req) + -> {cowboy_router:tokens() | undefined, Req} when Req::req(). +path_info(Req) -> + {Req#http_req.path_info, Req}. + +-spec qs(Req) -> {binary(), Req} when Req::req(). +qs(Req) -> + {Req#http_req.qs, Req}. + +-spec qs_val(binary(), Req) + -> {binary() | true | undefined, Req} when Req::req(). +qs_val(Name, Req) when is_binary(Name) -> + qs_val(Name, Req, undefined). + +-spec qs_val(binary(), Req, Default) + -> {binary() | true | Default, Req} when Req::req(), Default::any(). +qs_val(Name, Req=#http_req{qs=RawQs, qs_vals=undefined}, Default) + when is_binary(Name) -> + QsVals = cow_qs:parse_qs(RawQs), + qs_val(Name, Req#http_req{qs_vals=QsVals}, Default); +qs_val(Name, Req, Default) -> + case lists:keyfind(Name, 1, Req#http_req.qs_vals) of + {Name, Value} -> {Value, Req}; + false -> {Default, Req} + end. + +-spec qs_vals(Req) -> {list({binary(), binary() | true}), Req} when Req::req(). +qs_vals(Req=#http_req{qs=RawQs, qs_vals=undefined}) -> + QsVals = cow_qs:parse_qs(RawQs), + qs_vals(Req#http_req{qs_vals=QsVals}); +qs_vals(Req=#http_req{qs_vals=QsVals}) -> + {QsVals, Req}. + +%% The URL includes the scheme, host and port only. +-spec host_url(Req) -> {undefined | binary(), Req} when Req::req(). +host_url(Req=#http_req{port=undefined}) -> + {undefined, Req}; +host_url(Req=#http_req{transport=Transport, host=Host, port=Port}) -> + TransportName = Transport:name(), + Secure = case TransportName of + ssl -> <<"s">>; + _ -> <<>> + end, + PortBin = case {TransportName, Port} of + {ssl, 443} -> <<>>; + {tcp, 80} -> <<>>; + _ -> << ":", (integer_to_binary(Port))/binary >> + end, + {<< "http", Secure/binary, "://", Host/binary, PortBin/binary >>, Req}. + +%% The URL includes the scheme, host, port, path and query string. +-spec url(Req) -> {undefined | binary(), Req} when Req::req(). +url(Req=#http_req{}) -> + {HostURL, Req2} = host_url(Req), + url(HostURL, Req2). 
+ +url(undefined, Req=#http_req{}) -> + {undefined, Req}; +url(HostURL, Req=#http_req{path=Path, qs=QS}) -> + QS2 = case QS of + <<>> -> <<>>; + _ -> << "?", QS/binary >> + end, + {<< HostURL/binary, Path/binary, QS2/binary >>, Req}. + +-spec binding(atom(), Req) -> {any() | undefined, Req} when Req::req(). +binding(Name, Req) when is_atom(Name) -> + binding(Name, Req, undefined). + +-spec binding(atom(), Req, Default) + -> {any() | Default, Req} when Req::req(), Default::any(). +binding(Name, Req, Default) when is_atom(Name) -> + case lists:keyfind(Name, 1, Req#http_req.bindings) of + {Name, Value} -> {Value, Req}; + false -> {Default, Req} + end. + +-spec bindings(Req) -> {[{atom(), any()}], Req} when Req::req(). +bindings(Req) -> + {Req#http_req.bindings, Req}. + +-spec header(binary(), Req) + -> {binary() | undefined, Req} when Req::req(). +header(Name, Req) -> + header(Name, Req, undefined). + +-spec header(binary(), Req, Default) + -> {binary() | Default, Req} when Req::req(), Default::any(). +header(Name, Req, Default) -> + case lists:keyfind(Name, 1, Req#http_req.headers) of + {Name, Value} -> {Value, Req}; + false -> {Default, Req} + end. + +-spec headers(Req) -> {cowboy:http_headers(), Req} when Req::req(). +headers(Req) -> + {Req#http_req.headers, Req}. + +-spec parse_header(binary(), Req) + -> {ok, any(), Req} | {undefined, binary(), Req} + | {error, badarg} when Req::req(). +parse_header(Name, Req=#http_req{p_headers=PHeaders}) -> + case lists:keyfind(Name, 1, PHeaders) of + false -> parse_header(Name, Req, parse_header_default(Name)); + {Name, Value} -> {ok, Value, Req} + end. + +-spec parse_header_default(binary()) -> any(). +parse_header_default(<<"transfer-encoding">>) -> [<<"identity">>]; +parse_header_default(_Name) -> undefined. + +-spec parse_header(binary(), Req, any()) + -> {ok, any(), Req} | {undefined, binary(), Req} + | {error, badarg} when Req::req(). 
+parse_header(Name = <<"accept">>, Req, Default) -> + parse_header(Name, Req, Default, + fun (Value) -> + cowboy_http:list(Value, fun cowboy_http:media_range/2) + end); +parse_header(Name = <<"accept-charset">>, Req, Default) -> + parse_header(Name, Req, Default, + fun (Value) -> + cowboy_http:nonempty_list(Value, fun cowboy_http:conneg/2) + end); +parse_header(Name = <<"accept-encoding">>, Req, Default) -> + parse_header(Name, Req, Default, + fun (Value) -> + cowboy_http:list(Value, fun cowboy_http:conneg/2) + end); +parse_header(Name = <<"accept-language">>, Req, Default) -> + parse_header(Name, Req, Default, + fun (Value) -> + cowboy_http:nonempty_list(Value, fun cowboy_http:language_range/2) + end); +parse_header(Name = <<"authorization">>, Req, Default) -> + parse_header(Name, Req, Default, + fun (Value) -> + cowboy_http:token_ci(Value, fun cowboy_http:authorization/2) + end); +parse_header(Name = <<"content-length">>, Req, Default) -> + parse_header(Name, Req, Default, fun cow_http_hd:parse_content_length/1); +parse_header(Name = <<"content-type">>, Req, Default) -> + parse_header(Name, Req, Default, fun cowboy_http:content_type/1); +parse_header(Name = <<"cookie">>, Req, Default) -> + parse_header(Name, Req, Default, fun cow_cookie:parse_cookie/1); +parse_header(Name = <<"expect">>, Req, Default) -> + parse_header(Name, Req, Default, + fun (Value) -> + cowboy_http:nonempty_list(Value, fun cowboy_http:expectation/2) + end); +parse_header(Name, Req, Default) + when Name =:= <<"if-match">>; + Name =:= <<"if-none-match">> -> + parse_header(Name, Req, Default, fun cowboy_http:entity_tag_match/1); +parse_header(Name, Req, Default) + when Name =:= <<"if-modified-since">>; + Name =:= <<"if-unmodified-since">> -> + parse_header(Name, Req, Default, fun cowboy_http:http_date/1); +parse_header(Name = <<"range">>, Req, Default) -> + parse_header(Name, Req, Default, fun cowboy_http:range/1); +parse_header(Name, Req, Default) + when Name =:= <<"sec-websocket-protocol">>; + 
Name =:= <<"x-forwarded-for">> -> + parse_header(Name, Req, Default, + fun (Value) -> + cowboy_http:nonempty_list(Value, fun cowboy_http:token/2) + end); +parse_header(Name = <<"transfer-encoding">>, Req, Default) -> + parse_header(Name, Req, Default, fun cow_http_hd:parse_transfer_encoding/1); +%% @todo Product version. +parse_header(Name = <<"upgrade">>, Req, Default) -> + parse_header(Name, Req, Default, + fun (Value) -> + cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2) + end); +parse_header(Name = <<"sec-websocket-extensions">>, Req, Default) -> + parse_header(Name, Req, Default, fun cowboy_http:parameterized_tokens/1); +parse_header(Name, Req, Default) -> + {Value, Req2} = header(Name, Req, Default), + {undefined, Value, Req2}. + +parse_header(Name, Req=#http_req{p_headers=PHeaders}, Default, Fun) -> + case header(Name, Req) of + {undefined, Req2} -> + {ok, Default, Req2#http_req{p_headers=[{Name, Default}|PHeaders]}}; + {Value, Req2} -> + case Fun(Value) of + {error, badarg} -> + {error, badarg}; + P -> + {ok, P, Req2#http_req{p_headers=[{Name, P}|PHeaders]}} + end + end. + +-spec cookie(binary(), Req) + -> {binary() | undefined, Req} when Req::req(). +cookie(Name, Req) when is_binary(Name) -> + cookie(Name, Req, undefined). + +-spec cookie(binary(), Req, Default) + -> {binary() | Default, Req} when Req::req(), Default::any(). +cookie(Name, Req=#http_req{cookies=undefined}, Default) when is_binary(Name) -> + case parse_header(<<"cookie">>, Req) of + {ok, undefined, Req2} -> + {Default, Req2#http_req{cookies=[]}}; + {ok, Cookies, Req2} -> + cookie(Name, Req2#http_req{cookies=Cookies}, Default) + end; +cookie(Name, Req, Default) -> + case lists:keyfind(Name, 1, Req#http_req.cookies) of + {Name, Value} -> {Value, Req}; + false -> {Default, Req} + end. + +-spec cookies(Req) -> {list({binary(), binary()}), Req} when Req::req(). 
+cookies(Req=#http_req{cookies=undefined}) -> + case parse_header(<<"cookie">>, Req) of + {ok, undefined, Req2} -> + {[], Req2#http_req{cookies=[]}}; + {ok, Cookies, Req2} -> + cookies(Req2#http_req{cookies=Cookies}); + %% Flash player incorrectly sends an empty Cookie header. + {error, badarg} -> + {[], Req#http_req{cookies=[]}} + end; +cookies(Req=#http_req{cookies=Cookies}) -> + {Cookies, Req}. + +-spec meta(atom(), Req) -> {any() | undefined, Req} when Req::req(). +meta(Name, Req) -> + meta(Name, Req, undefined). + +-spec meta(atom(), Req, any()) -> {any(), Req} when Req::req(). +meta(Name, Req, Default) -> + case lists:keyfind(Name, 1, Req#http_req.meta) of + {Name, Value} -> {Value, Req}; + false -> {Default, Req} + end. + +-spec set_meta(atom(), any(), Req) -> Req when Req::req(). +set_meta(Name, Value, Req=#http_req{meta=Meta}) -> + Req#http_req{meta=lists:keystore(Name, 1, Meta, {Name, Value})}. + +%% Request Body API. + +-spec has_body(req()) -> boolean(). +has_body(Req) -> + case lists:keyfind(<<"content-length">>, 1, Req#http_req.headers) of + {_, <<"0">>} -> + false; + {_, _} -> + true; + _ -> + lists:keymember(<<"transfer-encoding">>, 1, Req#http_req.headers) + end. + +%% The length may not be known if Transfer-Encoding is not identity, +%% and the body hasn't been read at the time of the call. +-spec body_length(Req) -> {undefined | non_neg_integer(), Req} when Req::req(). +body_length(Req) -> + case parse_header(<<"transfer-encoding">>, Req) of + {ok, [<<"identity">>], Req2} -> + {ok, Length, Req3} = parse_header(<<"content-length">>, Req2, 0), + {Length, Req3}; + {ok, _, Req2} -> + {undefined, Req2} + end. + +-spec body(Req) + -> {ok, binary(), Req} | {more, binary(), Req} + | {error, atom()} when Req::req(). +body(Req) -> + body(Req, []). + +-spec body(Req, body_opts()) + -> {ok, binary(), Req} | {more, binary(), Req} + | {error, atom()} when Req::req(). 
+body(Req=#http_req{body_state=waiting}, Opts) -> + %% Send a 100 continue if needed (enabled by default). + Req1 = case lists:keyfind(continue, 1, Opts) of + {_, false} -> + Req; + _ -> + {ok, ExpectHeader, Req0} = parse_header(<<"expect">>, Req), + ok = case ExpectHeader of + [<<"100-continue">>] -> continue(Req0); + _ -> ok + end, + Req0 + end, + %% Initialize body streaming state. + CFun = case lists:keyfind(content_decode, 1, Opts) of + false -> + fun cowboy_http:ce_identity/1; + {_, CFun0} -> + CFun0 + end, + case lists:keyfind(transfer_decode, 1, Opts) of + false -> + case parse_header(<<"transfer-encoding">>, Req1) of + {ok, [<<"chunked">>], Req2} -> + body(Req2#http_req{body_state={stream, 0, + fun cow_http_te:stream_chunked/2, {0, 0}, CFun}}, Opts); + {ok, [<<"identity">>], Req2} -> + {Len, Req3} = body_length(Req2), + case Len of + 0 -> + {ok, <<>>, Req3#http_req{body_state=done}}; + _ -> + body(Req3#http_req{body_state={stream, Len, + fun cow_http_te:stream_identity/2, {0, Len}, + CFun}}, Opts) + end + end; + {_, TFun, TState} -> + body(Req1#http_req{body_state={stream, 0, + TFun, TState, CFun}}, Opts) + end; +body(Req=#http_req{body_state=done}, _) -> + {ok, <<>>, Req}; +body(Req, Opts) -> + ChunkLen = case lists:keyfind(length, 1, Opts) of + false -> 8000000; + {_, ChunkLen0} -> ChunkLen0 + end, + ReadLen = case lists:keyfind(read_length, 1, Opts) of + false -> 1000000; + {_, ReadLen0} -> ReadLen0 + end, + ReadTimeout = case lists:keyfind(read_timeout, 1, Opts) of + false -> 15000; + {_, ReadTimeout0} -> ReadTimeout0 + end, + body_loop(Req, ReadTimeout, ReadLen, ChunkLen, <<>>). 
+ +body_loop(Req=#http_req{buffer=Buffer, body_state={stream, Length, _, _, _}}, + ReadTimeout, ReadLength, ChunkLength, Acc) -> + {Tag, Res, Req2} = case Buffer of + <<>> -> + body_recv(Req, ReadTimeout, min(Length, ReadLength)); + _ -> + body_decode(Req, ReadTimeout) + end, + case {Tag, Res} of + {ok, {ok, Data}} -> + {ok, << Acc/binary, Data/binary >>, Req2}; + {more, {ok, Data}} -> + Acc2 = << Acc/binary, Data/binary >>, + case byte_size(Acc2) >= ChunkLength of + true -> {more, Acc2, Req2}; + false -> body_loop(Req2, ReadTimeout, ReadLength, ChunkLength, Acc2) + end; + _ -> %% Error. + Res + end. + +body_recv(Req=#http_req{transport=Transport, socket=Socket, buffer=Buffer}, + ReadTimeout, ReadLength) -> + case Transport:recv(Socket, ReadLength, ReadTimeout) of + {ok, Data} -> + body_decode(Req#http_req{buffer= << Buffer/binary, Data/binary >>}, + ReadTimeout); + Error = {error, _} -> + {error, Error, Req} + end. + +%% Two decodings happen. First a decoding function is applied to the +%% transferred data, and then another is applied to the actual content. +%% +%% Transfer encoding is generally used for chunked bodies. The decoding +%% function uses a state to keep track of how much it has read, which is +%% also initialized through this function. +%% +%% Content encoding is generally used for compression. +%% +%% @todo Handle chunked after-the-facts headers. +%% @todo Depending on the length returned we might want to 0 or +5 it. 
+body_decode(Req=#http_req{buffer=Data, body_state={stream, _, + TDecode, TState, CDecode}}, ReadTimeout) -> + case TDecode(Data, TState) of + more -> + body_recv(Req#http_req{body_state={stream, 0, + TDecode, TState, CDecode}}, ReadTimeout, 0); + {more, Data2, TState2} -> + {more, CDecode(Data2), Req#http_req{body_state={stream, 0, + TDecode, TState2, CDecode}, buffer= <<>>}}; + {more, Data2, Length, TState2} when is_integer(Length) -> + {more, CDecode(Data2), Req#http_req{body_state={stream, Length, + TDecode, TState2, CDecode}, buffer= <<>>}}; + {more, Data2, Rest, TState2} -> + {more, CDecode(Data2), Req#http_req{body_state={stream, 0, + TDecode, TState2, CDecode}, buffer=Rest}}; + {done, TotalLength, Rest} -> + {ok, {ok, <<>>}, body_decode_end(Req, TotalLength, Rest)}; + {done, Data2, TotalLength, Rest} -> + {ok, CDecode(Data2), body_decode_end(Req, TotalLength, Rest)} + end. + +body_decode_end(Req=#http_req{headers=Headers, p_headers=PHeaders}, + TotalLength, Rest) -> + Headers2 = lists:keystore(<<"content-length">>, 1, Headers, + {<<"content-length">>, integer_to_binary(TotalLength)}), + %% At this point we just assume TEs were all decoded. + Headers3 = lists:keydelete(<<"transfer-encoding">>, 1, Headers2), + PHeaders2 = lists:keystore(<<"content-length">>, 1, PHeaders, + {<<"content-length">>, TotalLength}), + PHeaders3 = lists:keydelete(<<"transfer-encoding">>, 1, PHeaders2), + Req#http_req{buffer=Rest, body_state=done, + headers=Headers3, p_headers=PHeaders3}. + +-spec body_qs(Req) + -> {ok, [{binary(), binary() | true}], Req} | {error, atom()} + when Req::req(). +body_qs(Req) -> + body_qs(Req, [ + {length, 64000}, + {read_length, 64000}, + {read_timeout, 5000}]). + +-spec body_qs(Req, body_opts()) -> {ok, [{binary(), binary() | true}], Req} + | {badlength, Req} | {error, atom()} when Req::req(). 
+body_qs(Req, Opts) -> + case body(Req, Opts) of + {ok, Body, Req2} -> + {ok, cow_qs:parse_qs(Body), Req2}; + {more, _, Req2} -> + {badlength, Req2}; + {error, Reason} -> + {error, Reason} + end. + +%% Multipart API. + +-spec part(Req) + -> {ok, cow_multipart:headers(), Req} | {done, Req} + when Req::req(). +part(Req) -> + part(Req, [ + {length, 64000}, + {read_length, 64000}, + {read_timeout, 5000}]). + +-spec part(Req, body_opts()) + -> {ok, cow_multipart:headers(), Req} | {done, Req} + when Req::req(). +part(Req=#http_req{multipart=undefined}, Opts) -> + part(init_multipart(Req), Opts); +part(Req, Opts) -> + {Data, Req2} = stream_multipart(Req, Opts), + part(Data, Opts, Req2). + +part(Buffer, Opts, Req=#http_req{multipart={Boundary, _}}) -> + case cow_multipart:parse_headers(Buffer, Boundary) of + more -> + {Data, Req2} = stream_multipart(Req, Opts), + part(<< Buffer/binary, Data/binary >>, Opts, Req2); + {more, Buffer2} -> + {Data, Req2} = stream_multipart(Req, Opts), + part(<< Buffer2/binary, Data/binary >>, Opts, Req2); + {ok, Headers, Rest} -> + {ok, Headers, Req#http_req{multipart={Boundary, Rest}}}; + %% Ignore epilogue. + {done, _} -> + {done, Req#http_req{multipart=undefined}} + end. + +-spec part_body(Req) + -> {ok, binary(), Req} | {more, binary(), Req} + when Req::req(). +part_body(Req) -> + part_body(Req, []). + +-spec part_body(Req, body_opts()) + -> {ok, binary(), Req} | {more, binary(), Req} + when Req::req(). +part_body(Req=#http_req{multipart=undefined}, Opts) -> + part_body(init_multipart(Req), Opts); +part_body(Req, Opts) -> + part_body(<<>>, Opts, Req, <<>>). 
+ +part_body(Buffer, Opts, Req=#http_req{multipart={Boundary, _}}, Acc) -> + ChunkLen = case lists:keyfind(length, 1, Opts) of + false -> 8000000; + {_, ChunkLen0} -> ChunkLen0 + end, + case byte_size(Acc) > ChunkLen of + true -> + {more, Acc, Req#http_req{multipart={Boundary, Buffer}}}; + false -> + {Data, Req2} = stream_multipart(Req, Opts), + case cow_multipart:parse_body(<< Buffer/binary, Data/binary >>, Boundary) of + {ok, Body} -> + part_body(<<>>, Opts, Req2, << Acc/binary, Body/binary >>); + {ok, Body, Rest} -> + part_body(Rest, Opts, Req2, << Acc/binary, Body/binary >>); + done -> + {ok, Acc, Req2}; + {done, Body} -> + {ok, << Acc/binary, Body/binary >>, Req2}; + {done, Body, Rest} -> + {ok, << Acc/binary, Body/binary >>, + Req2#http_req{multipart={Boundary, Rest}}} + end + end. + +init_multipart(Req) -> + {ok, {<<"multipart">>, _, Params}, Req2} + = parse_header(<<"content-type">>, Req), + {_, Boundary} = lists:keyfind(<<"boundary">>, 1, Params), + Req2#http_req{multipart={Boundary, <<>>}}. + +stream_multipart(Req=#http_req{body_state=BodyState, multipart={_, <<>>}}, Opts) -> + true = BodyState =/= done, + {_, Data, Req2} = body(Req, Opts), + {Data, Req2}; +stream_multipart(Req=#http_req{multipart={Boundary, Buffer}}, _) -> + {Buffer, Req#http_req{multipart={Boundary, <<>>}}}. + +%% Response API. + +%% The cookie name cannot contain any of the following characters: +%% =,;\s\t\r\n\013\014 +%% +%% The cookie value cannot contain any of the following characters: +%% ,; \t\r\n\013\014 +-spec set_resp_cookie(iodata(), iodata(), cookie_opts(), Req) + -> Req when Req::req(). +set_resp_cookie(Name, Value, Opts, Req) -> + Cookie = cow_cookie:setcookie(Name, Value, Opts), + set_resp_header(<<"set-cookie">>, Cookie, Req). + +-spec set_resp_header(binary(), iodata(), Req) + -> Req when Req::req(). +set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) -> + Req#http_req{resp_headers=[{Name, Value}|RespHeaders]}. 
+ +-spec set_resp_body(iodata(), Req) -> Req when Req::req(). +set_resp_body(Body, Req) -> + Req#http_req{resp_body=Body}. + +-spec set_resp_body_fun(resp_body_fun(), Req) -> Req when Req::req(). +set_resp_body_fun(StreamFun, Req) when is_function(StreamFun) -> + Req#http_req{resp_body=StreamFun}. + +%% If the body function crashes while writing the response body or writes +%% fewer bytes than declared the behaviour is undefined. +-spec set_resp_body_fun(non_neg_integer(), resp_body_fun(), Req) + -> Req when Req::req(); + (chunked, resp_chunked_fun(), Req) + -> Req when Req::req(). +set_resp_body_fun(StreamLen, StreamFun, Req) + when is_integer(StreamLen), is_function(StreamFun) -> + Req#http_req{resp_body={StreamLen, StreamFun}}; +set_resp_body_fun(chunked, StreamFun, Req) + when is_function(StreamFun) -> + Req#http_req{resp_body={chunked, StreamFun}}. + +-spec has_resp_header(binary(), req()) -> boolean(). +has_resp_header(Name, #http_req{resp_headers=RespHeaders}) -> + lists:keymember(Name, 1, RespHeaders). + +-spec has_resp_body(req()) -> boolean(). +has_resp_body(#http_req{resp_body=RespBody}) when is_function(RespBody) -> + true; +has_resp_body(#http_req{resp_body={chunked, _}}) -> + true; +has_resp_body(#http_req{resp_body={Length, _}}) -> + Length > 0; +has_resp_body(#http_req{resp_body=RespBody}) -> + iolist_size(RespBody) > 0. + +-spec delete_resp_header(binary(), Req) + -> Req when Req::req(). +delete_resp_header(Name, Req=#http_req{resp_headers=RespHeaders}) -> + RespHeaders2 = lists:keydelete(Name, 1, RespHeaders), + Req#http_req{resp_headers=RespHeaders2}. + +-spec reply(cowboy:http_status(), Req) -> {ok, Req} when Req::req(). +reply(Status, Req=#http_req{resp_body=Body}) -> + reply(Status, [], Body, Req). + +-spec reply(cowboy:http_status(), cowboy:http_headers(), Req) + -> {ok, Req} when Req::req(). +reply(Status, Headers, Req=#http_req{resp_body=Body}) -> + reply(Status, Headers, Body, Req). 
+ +-spec reply(cowboy:http_status(), cowboy:http_headers(), + iodata() | {non_neg_integer() | resp_body_fun()}, Req) + -> {ok, Req} when Req::req(). +reply(Status, Headers, Body, Req=#http_req{ + socket=Socket, transport=Transport, + version=Version, connection=Connection, + method=Method, resp_compress=Compress, + resp_state=RespState, resp_headers=RespHeaders}) + when RespState =:= waiting; RespState =:= waiting_stream -> + HTTP11Headers = if + Transport =/= cowboy_spdy, Version =:= 'HTTP/1.0', Connection =:= keepalive -> + [{<<"connection">>, atom_to_connection(Connection)}]; + Transport =/= cowboy_spdy, Version =:= 'HTTP/1.1', Connection =:= close -> + [{<<"connection">>, atom_to_connection(Connection)}]; + true -> + [] + end, + Req3 = case Body of + BodyFun when is_function(BodyFun) -> + %% We stream the response body until we close the connection. + RespConn = close, + {RespType, Req2} = if + Transport =:= cowboy_spdy -> + response(Status, Headers, RespHeaders, [ + {<<"date">>, cowboy_clock:rfc1123()}, + {<<"server">>, <<"Cowboy">>} + ], stream, Req); + true -> + response(Status, Headers, RespHeaders, [ + {<<"connection">>, <<"close">>}, + {<<"date">>, cowboy_clock:rfc1123()}, + {<<"server">>, <<"Cowboy">>}, + {<<"transfer-encoding">>, <<"identity">>} + ], <<>>, Req) + end, + if RespType =/= hook, Method =/= <<"HEAD">> -> + BodyFun(Socket, Transport); + true -> ok + end, + Req2#http_req{connection=RespConn}; + {chunked, BodyFun} -> + %% We stream the response body in chunks. + {RespType, Req2} = chunked_response(Status, Headers, Req), + if RespType =/= hook, Method =/= <<"HEAD">> -> + ChunkFun = fun(IoData) -> chunk(IoData, Req2) end, + BodyFun(ChunkFun), + %% Send the last chunk if chunked encoding was used. + if + Version =:= 'HTTP/1.0'; RespState =:= waiting_stream -> + Req2; + true -> + last_chunk(Req2) + end; + true -> Req2 + end; + {ContentLength, BodyFun} -> + %% We stream the response body for ContentLength bytes. 
+ RespConn = response_connection(Headers, Connection), + {RespType, Req2} = response(Status, Headers, RespHeaders, [ + {<<"content-length">>, integer_to_list(ContentLength)}, + {<<"date">>, cowboy_clock:rfc1123()}, + {<<"server">>, <<"Cowboy">>} + |HTTP11Headers], stream, Req), + if RespType =/= hook, Method =/= <<"HEAD">> -> + BodyFun(Socket, Transport); + true -> ok + end, + Req2#http_req{connection=RespConn}; + _ when Compress -> + RespConn = response_connection(Headers, Connection), + Req2 = reply_may_compress(Status, Headers, Body, Req, + RespHeaders, HTTP11Headers, Method), + Req2#http_req{connection=RespConn}; + _ -> + RespConn = response_connection(Headers, Connection), + Req2 = reply_no_compress(Status, Headers, Body, Req, + RespHeaders, HTTP11Headers, Method, iolist_size(Body)), + Req2#http_req{connection=RespConn} + end, + {ok, Req3#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}. + +reply_may_compress(Status, Headers, Body, Req, + RespHeaders, HTTP11Headers, Method) -> + BodySize = iolist_size(Body), + case parse_header(<<"accept-encoding">>, Req) of + {ok, Encodings, Req2} -> + CanGzip = (BodySize > 300) + andalso (false =:= lists:keyfind(<<"content-encoding">>, + 1, Headers)) + andalso (false =:= lists:keyfind(<<"content-encoding">>, + 1, RespHeaders)) + andalso (false =:= lists:keyfind(<<"transfer-encoding">>, + 1, Headers)) + andalso (false =:= lists:keyfind(<<"transfer-encoding">>, + 1, RespHeaders)) + andalso (Encodings =/= undefined) + andalso (false =/= lists:keyfind(<<"gzip">>, 1, Encodings)), + case CanGzip of + true -> + GzBody = zlib:gzip(Body), + {_, Req3} = response(Status, Headers, RespHeaders, [ + {<<"content-length">>, integer_to_list(byte_size(GzBody))}, + {<<"content-encoding">>, <<"gzip">>}, + {<<"date">>, cowboy_clock:rfc1123()}, + {<<"server">>, <<"Cowboy">>} + |HTTP11Headers], + case Method of <<"HEAD">> -> <<>>; _ -> GzBody end, + Req2), + Req3; + false -> + reply_no_compress(Status, Headers, Body, Req, + 
RespHeaders, HTTP11Headers, Method, BodySize) + end; + {error, badarg} -> + reply_no_compress(Status, Headers, Body, Req, + RespHeaders, HTTP11Headers, Method, BodySize) + end. + +reply_no_compress(Status, Headers, Body, Req, + RespHeaders, HTTP11Headers, Method, BodySize) -> + {_, Req2} = response(Status, Headers, RespHeaders, [ + {<<"content-length">>, integer_to_list(BodySize)}, + {<<"date">>, cowboy_clock:rfc1123()}, + {<<"server">>, <<"Cowboy">>} + |HTTP11Headers], + case Method of <<"HEAD">> -> <<>>; _ -> Body end, + Req), + Req2. + +-spec chunked_reply(cowboy:http_status(), Req) -> {ok, Req} when Req::req(). +chunked_reply(Status, Req) -> + chunked_reply(Status, [], Req). + +-spec chunked_reply(cowboy:http_status(), cowboy:http_headers(), Req) + -> {ok, Req} when Req::req(). +chunked_reply(Status, Headers, Req) -> + {_, Req2} = chunked_response(Status, Headers, Req), + {ok, Req2}. + +-spec chunk(iodata(), req()) -> ok | {error, atom()}. +chunk(_Data, #http_req{method= <<"HEAD">>}) -> + ok; +chunk(Data, #http_req{socket=Socket, transport=cowboy_spdy, + resp_state=chunks}) -> + cowboy_spdy:stream_data(Socket, Data); +chunk(Data, #http_req{socket=Socket, transport=Transport, + resp_state=stream}) -> + Transport:send(Socket, Data); +chunk(Data, #http_req{socket=Socket, transport=Transport, + resp_state=chunks}) -> + Transport:send(Socket, [integer_to_list(iolist_size(Data), 16), + <<"\r\n">>, Data, <<"\r\n">>]). + +%% If ever made public, need to send nothing if HEAD. +-spec last_chunk(Req) -> Req when Req::req(). +last_chunk(Req=#http_req{socket=Socket, transport=cowboy_spdy}) -> + _ = cowboy_spdy:stream_close(Socket), + Req#http_req{resp_state=done}; +last_chunk(Req=#http_req{socket=Socket, transport=Transport}) -> + _ = Transport:send(Socket, <<"0\r\n\r\n">>), + Req#http_req{resp_state=done}. + +-spec upgrade_reply(cowboy:http_status(), cowboy:http_headers(), Req) + -> {ok, Req} when Req::req(). 
+upgrade_reply(Status, Headers, Req=#http_req{transport=Transport, + resp_state=waiting, resp_headers=RespHeaders}) + when Transport =/= cowboy_spdy -> + {_, Req2} = response(Status, Headers, RespHeaders, [ + {<<"connection">>, <<"Upgrade">>} + ], <<>>, Req), + {ok, Req2#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}. + +-spec continue(req()) -> ok | {error, atom()}. +continue(#http_req{socket=Socket, transport=Transport, + version=Version}) -> + HTTPVer = atom_to_binary(Version, latin1), + Transport:send(Socket, + << HTTPVer/binary, " ", (status(100))/binary, "\r\n\r\n" >>). + +%% Meant to be used internally for sending errors after crashes. +-spec maybe_reply([{module(), atom(), arity() | [term()], _}], req()) -> ok. +maybe_reply(Stacktrace, Req) -> + receive + {cowboy_req, resp_sent} -> ok + after 0 -> + _ = do_maybe_reply(Stacktrace, Req), + ok + end. + +do_maybe_reply([ + {cow_http_hd, _, _, _}, + {cowboy_req, parse_header, _, _}|_], Req) -> + cowboy_req:reply(400, Req); +do_maybe_reply(_, Req) -> + cowboy_req:reply(500, Req). + +-spec ensure_response(req(), cowboy:http_status()) -> ok. +%% The response has already been fully sent to the client. +ensure_response(#http_req{resp_state=done}, _) -> + ok; +%% No response has been sent but everything apparently went fine. +%% Reply with the status code found in the second argument. +ensure_response(Req=#http_req{resp_state=RespState}, Status) + when RespState =:= waiting; RespState =:= waiting_stream -> + _ = reply(Status, [], [], Req), + ok; +%% Terminate the chunked body for HTTP/1.1 only. +ensure_response(#http_req{method= <<"HEAD">>}, _) -> + ok; +ensure_response(Req=#http_req{resp_state=chunks}, _) -> + _ = last_chunk(Req), + ok; +ensure_response(#http_req{}, _) -> + ok. + +%% Private setter/getter API. + +-spec append_buffer(binary(), Req) -> Req when Req::req(). +append_buffer(Suffix, Req=#http_req{buffer=Buffer}) -> + Req#http_req{buffer= << Buffer/binary, Suffix/binary >>}. 
+ +-spec get(atom(), req()) -> any(); ([atom()], req()) -> any(). +get(List, Req) when is_list(List) -> + [g(Atom, Req) || Atom <- List]; +get(Atom, Req) when is_atom(Atom) -> + g(Atom, Req). + +g(bindings, #http_req{bindings=Ret}) -> Ret; +g(body_state, #http_req{body_state=Ret}) -> Ret; +g(buffer, #http_req{buffer=Ret}) -> Ret; +g(connection, #http_req{connection=Ret}) -> Ret; +g(cookies, #http_req{cookies=Ret}) -> Ret; +g(headers, #http_req{headers=Ret}) -> Ret; +g(host, #http_req{host=Ret}) -> Ret; +g(host_info, #http_req{host_info=Ret}) -> Ret; +g(meta, #http_req{meta=Ret}) -> Ret; +g(method, #http_req{method=Ret}) -> Ret; +g(multipart, #http_req{multipart=Ret}) -> Ret; +g(onresponse, #http_req{onresponse=Ret}) -> Ret; +g(p_headers, #http_req{p_headers=Ret}) -> Ret; +g(path, #http_req{path=Ret}) -> Ret; +g(path_info, #http_req{path_info=Ret}) -> Ret; +g(peer, #http_req{peer=Ret}) -> Ret; +g(pid, #http_req{pid=Ret}) -> Ret; +g(port, #http_req{port=Ret}) -> Ret; +g(qs, #http_req{qs=Ret}) -> Ret; +g(qs_vals, #http_req{qs_vals=Ret}) -> Ret; +g(resp_body, #http_req{resp_body=Ret}) -> Ret; +g(resp_compress, #http_req{resp_compress=Ret}) -> Ret; +g(resp_headers, #http_req{resp_headers=Ret}) -> Ret; +g(resp_state, #http_req{resp_state=Ret}) -> Ret; +g(socket, #http_req{socket=Ret}) -> Ret; +g(transport, #http_req{transport=Ret}) -> Ret; +g(version, #http_req{version=Ret}) -> Ret. + +-spec set([{atom(), any()}], Req) -> Req when Req::req(). 
+set([], Req) -> Req; +set([{bindings, Val}|Tail], Req) -> set(Tail, Req#http_req{bindings=Val}); +set([{body_state, Val}|Tail], Req) -> set(Tail, Req#http_req{body_state=Val}); +set([{buffer, Val}|Tail], Req) -> set(Tail, Req#http_req{buffer=Val}); +set([{connection, Val}|Tail], Req) -> set(Tail, Req#http_req{connection=Val}); +set([{cookies, Val}|Tail], Req) -> set(Tail, Req#http_req{cookies=Val}); +set([{headers, Val}|Tail], Req) -> set(Tail, Req#http_req{headers=Val}); +set([{host, Val}|Tail], Req) -> set(Tail, Req#http_req{host=Val}); +set([{host_info, Val}|Tail], Req) -> set(Tail, Req#http_req{host_info=Val}); +set([{meta, Val}|Tail], Req) -> set(Tail, Req#http_req{meta=Val}); +set([{method, Val}|Tail], Req) -> set(Tail, Req#http_req{method=Val}); +set([{multipart, Val}|Tail], Req) -> set(Tail, Req#http_req{multipart=Val}); +set([{onresponse, Val}|Tail], Req) -> set(Tail, Req#http_req{onresponse=Val}); +set([{p_headers, Val}|Tail], Req) -> set(Tail, Req#http_req{p_headers=Val}); +set([{path, Val}|Tail], Req) -> set(Tail, Req#http_req{path=Val}); +set([{path_info, Val}|Tail], Req) -> set(Tail, Req#http_req{path_info=Val}); +set([{peer, Val}|Tail], Req) -> set(Tail, Req#http_req{peer=Val}); +set([{pid, Val}|Tail], Req) -> set(Tail, Req#http_req{pid=Val}); +set([{port, Val}|Tail], Req) -> set(Tail, Req#http_req{port=Val}); +set([{qs, Val}|Tail], Req) -> set(Tail, Req#http_req{qs=Val}); +set([{qs_vals, Val}|Tail], Req) -> set(Tail, Req#http_req{qs_vals=Val}); +set([{resp_body, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_body=Val}); +set([{resp_headers, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_headers=Val}); +set([{resp_state, Val}|Tail], Req) -> set(Tail, Req#http_req{resp_state=Val}); +set([{socket, Val}|Tail], Req) -> set(Tail, Req#http_req{socket=Val}); +set([{transport, Val}|Tail], Req) -> set(Tail, Req#http_req{transport=Val}); +set([{version, Val}|Tail], Req) -> set(Tail, Req#http_req{version=Val}). 
+ +-spec set_bindings(cowboy_router:tokens(), cowboy_router:tokens(), + cowboy_router:bindings(), Req) -> Req when Req::req(). +set_bindings(HostInfo, PathInfo, Bindings, Req) -> + Req#http_req{host_info=HostInfo, path_info=PathInfo, + bindings=Bindings}. + +%% Misc API. + +-spec compact(Req) -> Req when Req::req(). +compact(Req) -> + Req#http_req{host_info=undefined, + path_info=undefined, qs_vals=undefined, + bindings=undefined, headers=[], + p_headers=[], cookies=[]}. + +-spec lock(Req) -> Req when Req::req(). +lock(Req) -> + Req#http_req{resp_state=locked}. + +-spec to_list(req()) -> [{atom(), any()}]. +to_list(Req) -> + lists:zip(record_info(fields, http_req), tl(tuple_to_list(Req))). + +%% Internal. + +-spec chunked_response(cowboy:http_status(), cowboy:http_headers(), Req) -> + {normal | hook, Req} when Req::req(). +chunked_response(Status, Headers, Req=#http_req{ + transport=cowboy_spdy, resp_state=waiting, + resp_headers=RespHeaders}) -> + {RespType, Req2} = response(Status, Headers, RespHeaders, [ + {<<"date">>, cowboy_clock:rfc1123()}, + {<<"server">>, <<"Cowboy">>} + ], stream, Req), + {RespType, Req2#http_req{resp_state=chunks, + resp_headers=[], resp_body= <<>>}}; +chunked_response(Status, Headers, Req=#http_req{ + version=Version, connection=Connection, + resp_state=RespState, resp_headers=RespHeaders}) + when RespState =:= waiting; RespState =:= waiting_stream -> + RespConn = response_connection(Headers, Connection), + HTTP11Headers = if + Version =:= 'HTTP/1.0', Connection =:= keepalive -> + [{<<"connection">>, atom_to_connection(Connection)}]; + Version =:= 'HTTP/1.0' -> []; + true -> + MaybeTE = if + RespState =:= waiting_stream -> []; + true -> [{<<"transfer-encoding">>, <<"chunked">>}] + end, + if + Connection =:= close -> + [{<<"connection">>, atom_to_connection(Connection)}|MaybeTE]; + true -> + MaybeTE + end + end, + RespState2 = if + Version =:= 'HTTP/1.1', RespState =:= 'waiting' -> chunks; + true -> stream + end, + {RespType, Req2} = 
response(Status, Headers, RespHeaders, [ + {<<"date">>, cowboy_clock:rfc1123()}, + {<<"server">>, <<"Cowboy">>} + |HTTP11Headers], <<>>, Req), + {RespType, Req2#http_req{connection=RespConn, resp_state=RespState2, + resp_headers=[], resp_body= <<>>}}. + +-spec response(cowboy:http_status(), cowboy:http_headers(), + cowboy:http_headers(), cowboy:http_headers(), stream | iodata(), Req) + -> {normal | hook, Req} when Req::req(). +response(Status, Headers, RespHeaders, DefaultHeaders, Body, Req=#http_req{ + socket=Socket, transport=Transport, version=Version, + pid=ReqPid, onresponse=OnResponse}) -> + FullHeaders = case OnResponse of + already_called -> Headers; + _ -> response_merge_headers(Headers, RespHeaders, DefaultHeaders) + end, + Body2 = case Body of stream -> <<>>; _ -> Body end, + {Status2, FullHeaders2, Req2} = case OnResponse of + already_called -> {Status, FullHeaders, Req}; + undefined -> {Status, FullHeaders, Req}; + OnResponse -> + case OnResponse(Status, FullHeaders, Body2, + %% Don't call 'onresponse' from the hook itself. + Req#http_req{resp_headers=[], resp_body= <<>>, + onresponse=already_called}) of + StHdReq = {_, _, _} -> + StHdReq; + Req1 -> + {Status, FullHeaders, Req1} + end + end, + ReplyType = case Req2#http_req.resp_state of + waiting when Transport =:= cowboy_spdy, Body =:= stream -> + cowboy_spdy:stream_reply(Socket, status(Status2), FullHeaders2), + ReqPid ! {?MODULE, resp_sent}, + normal; + waiting when Transport =:= cowboy_spdy -> + cowboy_spdy:reply(Socket, status(Status2), FullHeaders2, Body), + ReqPid ! {?MODULE, resp_sent}, + normal; + RespState when RespState =:= waiting; RespState =:= waiting_stream -> + HTTPVer = atom_to_binary(Version, latin1), + StatusLine = << HTTPVer/binary, " ", + (status(Status2))/binary, "\r\n" >>, + HeaderLines = [[Key, <<": ">>, Value, <<"\r\n">>] + || {Key, Value} <- FullHeaders2], + Transport:send(Socket, [StatusLine, HeaderLines, <<"\r\n">>, Body2]), + ReqPid ! 
{?MODULE, resp_sent}, + normal; + _ -> + hook + end, + {ReplyType, Req2}. + +-spec response_connection(cowboy:http_headers(), keepalive | close) + -> keepalive | close. +response_connection([], Connection) -> + Connection; +response_connection([{Name, Value}|Tail], Connection) -> + case Name of + <<"connection">> -> + Tokens = cow_http_hd:parse_connection(Value), + connection_to_atom(Tokens); + _ -> + response_connection(Tail, Connection) + end. + +-spec response_merge_headers(cowboy:http_headers(), cowboy:http_headers(), + cowboy:http_headers()) -> cowboy:http_headers(). +response_merge_headers(Headers, RespHeaders, DefaultHeaders) -> + Headers2 = [{Key, Value} || {Key, Value} <- Headers], + merge_headers( + merge_headers(Headers2, RespHeaders), + DefaultHeaders). + +-spec merge_headers(cowboy:http_headers(), cowboy:http_headers()) + -> cowboy:http_headers(). + +%% Merge headers by prepending the tuples in the second list to the +%% first list. It also handles Set-Cookie properly, which supports +%% duplicated entries. Notice that, while the RFC2109 does allow more +%% than one cookie to be set per Set-Cookie header, we are following +%% the implementation of common web servers and applications which +%% return many distinct headers per each Set-Cookie entry to avoid +%% issues with clients/browser which may not support it. +merge_headers(Headers, []) -> + Headers; +merge_headers(Headers, [{<<"set-cookie">>, Value}|Tail]) -> + merge_headers([{<<"set-cookie">>, Value}|Headers], Tail); +merge_headers(Headers, [{Name, Value}|Tail]) -> + Headers2 = case lists:keymember(Name, 1, Headers) of + true -> Headers; + false -> [{Name, Value}|Headers] + end, + merge_headers(Headers2, Tail). + +-spec atom_to_connection(keepalive) -> <<_:80>>; + (close) -> <<_:40>>. +atom_to_connection(keepalive) -> + <<"keep-alive">>; +atom_to_connection(close) -> + <<"close">>. + +%% We don't match on "keep-alive" since it is the default value. 
+-spec connection_to_atom([binary()]) -> keepalive | close. +connection_to_atom([]) -> + keepalive; +connection_to_atom([<<"close">>|_]) -> + close; +connection_to_atom([_|Tail]) -> + connection_to_atom(Tail). + +-spec status(cowboy:http_status()) -> binary(). +status(100) -> <<"100 Continue">>; +status(101) -> <<"101 Switching Protocols">>; +status(102) -> <<"102 Processing">>; +status(200) -> <<"200 OK">>; +status(201) -> <<"201 Created">>; +status(202) -> <<"202 Accepted">>; +status(203) -> <<"203 Non-Authoritative Information">>; +status(204) -> <<"204 No Content">>; +status(205) -> <<"205 Reset Content">>; +status(206) -> <<"206 Partial Content">>; +status(207) -> <<"207 Multi-Status">>; +status(226) -> <<"226 IM Used">>; +status(300) -> <<"300 Multiple Choices">>; +status(301) -> <<"301 Moved Permanently">>; +status(302) -> <<"302 Found">>; +status(303) -> <<"303 See Other">>; +status(304) -> <<"304 Not Modified">>; +status(305) -> <<"305 Use Proxy">>; +status(306) -> <<"306 Switch Proxy">>; +status(307) -> <<"307 Temporary Redirect">>; +status(400) -> <<"400 Bad Request">>; +status(401) -> <<"401 Unauthorized">>; +status(402) -> <<"402 Payment Required">>; +status(403) -> <<"403 Forbidden">>; +status(404) -> <<"404 Not Found">>; +status(405) -> <<"405 Method Not Allowed">>; +status(406) -> <<"406 Not Acceptable">>; +status(407) -> <<"407 Proxy Authentication Required">>; +status(408) -> <<"408 Request Timeout">>; +status(409) -> <<"409 Conflict">>; +status(410) -> <<"410 Gone">>; +status(411) -> <<"411 Length Required">>; +status(412) -> <<"412 Precondition Failed">>; +status(413) -> <<"413 Request Entity Too Large">>; +status(414) -> <<"414 Request-URI Too Long">>; +status(415) -> <<"415 Unsupported Media Type">>; +status(416) -> <<"416 Requested Range Not Satisfiable">>; +status(417) -> <<"417 Expectation Failed">>; +status(418) -> <<"418 I'm a teapot">>; +status(422) -> <<"422 Unprocessable Entity">>; +status(423) -> <<"423 Locked">>; +status(424) -> 
<<"424 Failed Dependency">>; +status(425) -> <<"425 Unordered Collection">>; +status(426) -> <<"426 Upgrade Required">>; +status(428) -> <<"428 Precondition Required">>; +status(429) -> <<"429 Too Many Requests">>; +status(431) -> <<"431 Request Header Fields Too Large">>; +status(500) -> <<"500 Internal Server Error">>; +status(501) -> <<"501 Not Implemented">>; +status(502) -> <<"502 Bad Gateway">>; +status(503) -> <<"503 Service Unavailable">>; +status(504) -> <<"504 Gateway Timeout">>; +status(505) -> <<"505 HTTP Version Not Supported">>; +status(506) -> <<"506 Variant Also Negotiates">>; +status(507) -> <<"507 Insufficient Storage">>; +status(510) -> <<"510 Not Extended">>; +status(511) -> <<"511 Network Authentication Required">>; +status(B) when is_binary(B) -> B. + +%% Tests. + +-ifdef(TEST). +url_test() -> + {undefined, _} = + url(#http_req{transport=ranch_tcp, host= <<>>, port= undefined, + path= <<>>, qs= <<>>, pid=self()}), + {<<"http://localhost/path">>, _ } = + url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=80, + path= <<"/path">>, qs= <<>>, pid=self()}), + {<<"http://localhost:443/path">>, _} = + url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=443, + path= <<"/path">>, qs= <<>>, pid=self()}), + {<<"http://localhost:8080/path">>, _} = + url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=8080, + path= <<"/path">>, qs= <<>>, pid=self()}), + {<<"http://localhost:8080/path?dummy=2785">>, _} = + url(#http_req{transport=ranch_tcp, host= <<"localhost">>, port=8080, + path= <<"/path">>, qs= <<"dummy=2785">>, pid=self()}), + {<<"https://localhost/path">>, _} = + url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=443, + path= <<"/path">>, qs= <<>>, pid=self()}), + {<<"https://localhost:8443/path">>, _} = + url(#http_req{transport=ranch_ssl, host= <<"localhost">>, port=8443, + path= <<"/path">>, qs= <<>>, pid=self()}), + {<<"https://localhost:8443/path?dummy=2785">>, _} = + url(#http_req{transport=ranch_ssl, 
host= <<"localhost">>, port=8443, + path= <<"/path">>, qs= <<"dummy=2785">>, pid=self()}), + ok. + +connection_to_atom_test_() -> + Tests = [ + {[<<"close">>], close}, + {[<<"keep-alive">>], keepalive}, + {[<<"keep-alive">>, <<"upgrade">>], keepalive} + ], + [{lists:flatten(io_lib:format("~p", [T])), + fun() -> R = connection_to_atom(T) end} || {T, R} <- Tests]. + +merge_headers_test_() -> + Tests = [ + {[{<<"content-length">>,<<"13">>},{<<"server">>,<<"Cowboy">>}], + [{<<"set-cookie">>,<<"foo=bar">>},{<<"content-length">>,<<"11">>}], + [{<<"set-cookie">>,<<"foo=bar">>}, + {<<"content-length">>,<<"13">>}, + {<<"server">>,<<"Cowboy">>}]}, + {[{<<"content-length">>,<<"13">>},{<<"server">>,<<"Cowboy">>}], + [{<<"set-cookie">>,<<"foo=bar">>},{<<"set-cookie">>,<<"bar=baz">>}], + [{<<"set-cookie">>,<<"bar=baz">>}, + {<<"set-cookie">>,<<"foo=bar">>}, + {<<"content-length">>,<<"13">>}, + {<<"server">>,<<"Cowboy">>}]} + ], + [fun() -> Res = merge_headers(L,R) end || {L, R, Res} <- Tests]. +-endif. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_rest.erl b/rabbitmq-server/deps/cowboy/src/cowboy_rest.erl similarity index 54% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_rest.erl rename to rabbitmq-server/deps/cowboy/src/cowboy_rest.erl index e6cc6ff..fe72583 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_rest.erl +++ b/rabbitmq-server/deps/cowboy/src/cowboy_rest.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011, Loïc Hoguin +%% Copyright (c) 2011-2014, Loïc Hoguin %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -12,25 +12,31 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -%% @doc Experimental REST protocol implementation. 
-%% -%% Based on the Webmachine Diagram from Alan Dean and Justin Sheehy, which -%% can be found in the Webmachine source tree, and on the Webmachine -%% documentation available at http://wiki.basho.com/Webmachine.html -%% at the time of writing. --module(cowboy_http_rest). +%% Originally based on the Webmachine Diagram from Alan Dean and +%% Justin Sheehy. +-module(cowboy_rest). +-behaviour(cowboy_sub_protocol). + -export([upgrade/4]). -record(state, { + env :: cowboy_middleware:env(), + method = undefined :: binary(), + %% Handler. handler :: atom(), handler_state :: any(), + %% Allowed methods. Only used for OPTIONS requests. + allowed_methods :: [binary()], + %% Media type. content_types_p = [] :: - [{{binary(), binary(), [{binary(), binary()}]}, atom()}], + [{binary() | {binary(), binary(), [{binary(), binary()}] | '*'}, + atom()}], content_type_a :: undefined - | {{binary(), binary(), [{binary(), binary()}]}, atom()}, + | {binary() | {binary(), binary(), [{binary(), binary()}] | '*'}, + atom()}, %% Language. languages_p = [] :: [binary()], @@ -40,52 +46,51 @@ charsets_p = [] :: [binary()], charset_a :: undefined | binary(), + %% Whether the resource exists. + exists = false :: boolean(), + %% Cached resource calls. - etag :: undefined | no_call | binary(), + etag :: undefined | no_call | {strong | weak, binary()}, last_modified :: undefined | no_call | calendar:datetime(), - expires :: undefined | no_call | calendar:datetime() + expires :: undefined | no_call | calendar:datetime() | binary() }). --include("include/http.hrl"). - -%% @doc Upgrade a HTTP request to the REST protocol. -%% -%% You do not need to call this function manually. To upgrade to the REST -%% protocol, you simply need to return {upgrade, protocol, {@module}} -%% in your cowboy_http_handler:init/3 handler function. --spec upgrade(pid(), module(), any(), #http_req{}) - -> {ok, #http_req{}} | close. 
-upgrade(_ListenerPid, Handler, Opts, Req) -> - try - case erlang:function_exported(Handler, rest_init, 2) of - true -> - case Handler:rest_init(Req, Opts) of - {ok, Req2, HandlerState} -> - service_available(Req2, #state{handler=Handler, - handler_state=HandlerState}) - end; - false -> - service_available(Req, #state{handler=Handler}) - end - catch Class:Reason -> - error_logger:error_msg( - "** Handler ~p terminating in rest_init/3~n" - " for the reason ~p:~p~n** Options were ~p~n" - "** Request was ~p~n** Stacktrace: ~p~n~n", - [Handler, Class, Reason, Opts, Req, erlang:get_stacktrace()]), - {ok, _Req2} = cowboy_http_req:reply(500, Req), - close +-spec upgrade(Req, Env, module(), any()) + -> {ok, Req, Env} when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +upgrade(Req, Env, Handler, HandlerOpts) -> + Method = cowboy_req:get(method, Req), + case erlang:function_exported(Handler, rest_init, 2) of + true -> + try Handler:rest_init(Req, HandlerOpts) of + {ok, Req2, HandlerState} -> + service_available(Req2, #state{env=Env, method=Method, + handler=Handler, handler_state=HandlerState}) + catch Class:Reason -> + Stacktrace = erlang:get_stacktrace(), + cowboy_req:maybe_reply(Stacktrace, Req), + erlang:Class([ + {reason, Reason}, + {mfa, {Handler, rest_init, 2}}, + {stacktrace, Stacktrace}, + {req, cowboy_req:to_list(Req)}, + {opts, HandlerOpts} + ]) + end; + false -> + service_available(Req, #state{env=Env, method=Method, + handler=Handler}) end. service_available(Req, State) -> expect(Req, State, service_available, true, fun known_methods/2, 503). -%% known_methods/2 should return a list of atoms or binary methods. -known_methods(Req=#http_req{method=Method}, State) -> +%% known_methods/2 should return a list of binary methods. 
+known_methods(Req, State=#state{method=Method}) -> case call(Req, State, known_methods) of - no_call when Method =:= 'HEAD'; Method =:= 'GET'; Method =:= 'POST'; - Method =:= 'PUT'; Method =:= 'DELETE'; Method =:= 'TRACE'; - Method =:= 'CONNECT'; Method =:= 'OPTIONS' -> + no_call when Method =:= <<"HEAD">>; Method =:= <<"GET">>; + Method =:= <<"POST">>; Method =:= <<"PUT">>; + Method =:= <<"PATCH">>; Method =:= <<"DELETE">>; + Method =:= <<"OPTIONS">> -> next(Req, State, fun uri_too_long/2); no_call -> next(Req, State, 501); @@ -102,38 +107,41 @@ known_methods(Req=#http_req{method=Method}, State) -> uri_too_long(Req, State) -> expect(Req, State, uri_too_long, false, fun allowed_methods/2, 414). -%% allowed_methods/2 should return a list of atoms or binary methods. -allowed_methods(Req=#http_req{method=Method}, State) -> +%% allowed_methods/2 should return a list of binary methods. +allowed_methods(Req, State=#state{method=Method}) -> case call(Req, State, allowed_methods) of - no_call when Method =:= 'HEAD'; Method =:= 'GET' -> + no_call when Method =:= <<"HEAD">>; Method =:= <<"GET">> -> next(Req, State, fun malformed_request/2); + no_call when Method =:= <<"OPTIONS">> -> + next(Req, State#state{allowed_methods= + [<<"HEAD">>, <<"GET">>, <<"OPTIONS">>]}, + fun malformed_request/2); no_call -> - method_not_allowed(Req, State, ['GET', 'HEAD']); + method_not_allowed(Req, State, + [<<"HEAD">>, <<"GET">>, <<"OPTIONS">>]); {halt, Req2, HandlerState} -> terminate(Req2, State#state{handler_state=HandlerState}); {List, Req2, HandlerState} -> State2 = State#state{handler_state=HandlerState}, case lists:member(Method, List) of - true -> next(Req2, State2, fun malformed_request/2); - false -> method_not_allowed(Req2, State2, List) + true when Method =:= <<"OPTIONS">> -> + next(Req2, State2#state{allowed_methods=List}, + fun malformed_request/2); + true -> + next(Req2, State2, fun malformed_request/2); + false -> + method_not_allowed(Req2, State2, List) end end. 
+method_not_allowed(Req, State, []) -> + Req2 = cowboy_req:set_resp_header(<<"allow">>, <<>>, Req), + respond(Req2, State, 405); method_not_allowed(Req, State, Methods) -> - {ok, Req2} = cowboy_http_req:set_resp_header( - <<"Allow">>, method_not_allowed_build(Methods, []), Req), + << ", ", Allow/binary >> = << << ", ", M/binary >> || M <- Methods >>, + Req2 = cowboy_req:set_resp_header(<<"allow">>, Allow, Req), respond(Req2, State, 405). -method_not_allowed_build([], []) -> - <<>>; -method_not_allowed_build([], [_Ignore|Acc]) -> - lists:reverse(Acc); -method_not_allowed_build([Method|Tail], Acc) when is_atom(Method) -> - Method2 = list_to_binary(atom_to_list(Method)), - method_not_allowed_build(Tail, [<<", ">>, Method2|Acc]); -method_not_allowed_build([Method|Tail], Acc) -> - method_not_allowed_build(Tail, [<<", ">>, Method|Acc]). - malformed_request(Req, State) -> expect(Req, State, malformed_request, false, fun is_authorized/2, 400). @@ -147,8 +155,8 @@ is_authorized(Req, State) -> {true, Req2, HandlerState} -> forbidden(Req2, State#state{handler_state=HandlerState}); {{false, AuthHead}, Req2, HandlerState} -> - {ok, Req3} = cowboy_http_req:set_resp_header( - <<"Www-Authenticate">>, AuthHead, Req2), + Req3 = cowboy_req:set_resp_header( + <<"www-authenticate">>, AuthHead, Req2), respond(Req3, State#state{handler_state=HandlerState}, 401) end. @@ -161,15 +169,23 @@ valid_content_headers(Req, State) -> known_content_type(Req, State) -> expect(Req, State, known_content_type, true, - fun valid_entity_length/2, 413). + fun valid_entity_length/2, 415). valid_entity_length(Req, State) -> expect(Req, State, valid_entity_length, true, fun options/2, 413). %% If you need to add additional headers to the response at this point, %% you should do it directly in the options/2 call using set_resp_headers. 
-options(Req=#http_req{method='OPTIONS'}, State) -> +options(Req, State=#state{allowed_methods=Methods, method= <<"OPTIONS">>}) -> case call(Req, State, options) of + no_call when Methods =:= [] -> + Req2 = cowboy_req:set_resp_header(<<"allow">>, <<>>, Req), + respond(Req2, State, 200); + no_call -> + << ", ", Allow/binary >> + = << << ", ", M/binary >> || M <- Methods >>, + Req2 = cowboy_req:set_resp_header(<<"allow">>, Allow, Req), + respond(Req2, State, 200); {halt, Req2, HandlerState} -> terminate(Req2, State#state{handler_state=HandlerState}); {ok, Req2, HandlerState} -> @@ -190,39 +206,53 @@ options(Req, State) -> %% %% Note that it is also possible to return a binary content type that will %% then be parsed by Cowboy. However note that while this may make your -%% resources a little more readable, this is a lot less efficient. An example -%% of such a return value would be: +%% resources a little more readable, this is a lot less efficient. +%% +%% An example of such return value would be: %% {<<"text/html">>, to_html} -content_types_provided(Req=#http_req{meta=Meta}, State) -> +content_types_provided(Req, State) -> case call(Req, State, content_types_provided) of no_call -> - not_acceptable(Req, State); + State2 = State#state{ + content_types_p=[{{<<"text">>, <<"html">>, '*'}, to_html}]}, + case cowboy_req:parse_header(<<"accept">>, Req) of + {error, badarg} -> + respond(Req, State2, 400); + {ok, undefined, Req2} -> + languages_provided( + cowboy_req:set_meta(media_type, {<<"text">>, <<"html">>, []}, Req2), + State2#state{content_type_a={{<<"text">>, <<"html">>, []}, to_html}}); + {ok, Accept, Req2} -> + Accept2 = prioritize_accept(Accept), + choose_media_type(Req2, State2, Accept2) + end; {halt, Req2, HandlerState} -> terminate(Req2, State#state{handler_state=HandlerState}); {[], Req2, HandlerState} -> not_acceptable(Req2, State#state{handler_state=HandlerState}); {CTP, Req2, HandlerState} -> - CTP2 = [normalize_content_types_provided(P) || P <- CTP], + 
CTP2 = [normalize_content_types(P) || P <- CTP], State2 = State#state{ handler_state=HandlerState, content_types_p=CTP2}, - {Accept, Req3} = cowboy_http_req:parse_header('Accept', Req2), - case Accept of - undefined -> + case cowboy_req:parse_header(<<"accept">>, Req2) of + {error, badarg} -> + respond(Req2, State2, 400); + {ok, undefined, Req3} -> {PMT, _Fun} = HeadCTP = hd(CTP2), languages_provided( - Req3#http_req{meta=[{media_type, PMT}|Meta]}, + cowboy_req:set_meta(media_type, PMT, Req3), State2#state{content_type_a=HeadCTP}); - Accept -> + {ok, Accept, Req3} -> Accept2 = prioritize_accept(Accept), choose_media_type(Req3, State2, Accept2) end end. -normalize_content_types_provided({ContentType, Handler}) +normalize_content_types({ContentType, Callback}) when is_binary(ContentType) -> - {cowboy_http:content_type(ContentType), Handler}; -normalize_content_types_provided(Provided) -> - Provided. + {cowboy_http:content_type(ContentType), Callback}; +normalize_content_types(Normalized) -> + Normalized. prioritize_accept(Accept) -> lists:sort( @@ -273,12 +303,18 @@ match_media_type(Req, State, Accept, match_media_type(Req, State, Accept, [_Any|Tail], MediaType) -> match_media_type(Req, State, Accept, Tail, MediaType). 
-match_media_type_params(Req=#http_req{meta=Meta}, State, Accept, +match_media_type_params(Req, State, _Accept, + [Provided = {{TP, STP, '*'}, _Fun}|_Tail], + {{_TA, _STA, Params_A}, _QA, _APA}) -> + PMT = {TP, STP, Params_A}, + languages_provided(cowboy_req:set_meta(media_type, PMT, Req), + State#state{content_type_a=Provided}); +match_media_type_params(Req, State, Accept, [Provided = {PMT = {_TP, _STP, Params_P}, _Fun}|Tail], MediaType = {{_TA, _STA, Params_A}, _QA, _APA}) -> case lists:sort(Params_P) =:= lists:sort(Params_A) of true -> - languages_provided(Req#http_req{meta=[{media_type, PMT}|Meta]}, + languages_provided(cowboy_req:set_meta(media_type, PMT, Req), State#state{content_type_a=Provided}); false -> match_media_type(Req, State, Accept, Tail, MediaType) @@ -299,8 +335,8 @@ languages_provided(Req, State) -> not_acceptable(Req2, State#state{handler_state=HandlerState}); {LP, Req2, HandlerState} -> State2 = State#state{handler_state=HandlerState, languages_p=LP}, - {AcceptLanguage, Req3} = - cowboy_http_req:parse_header('Accept-Language', Req2), + {ok, AcceptLanguage, Req3} = + cowboy_req:parse_header(<<"accept-language">>, Req2), case AcceptLanguage of undefined -> set_language(Req3, State2#state{language_a=hd(LP)}); @@ -345,10 +381,9 @@ match_language(Req, State, Accept, [Provided|Tail], match_language(Req, State, Accept, Tail, Language) end. -set_language(Req=#http_req{meta=Meta}, State=#state{language_a=Language}) -> - {ok, Req2} = cowboy_http_req:set_resp_header( - <<"Content-Language">>, Language, Req), - charsets_provided(Req2#http_req{meta=[{language, Language}|Meta]}, State). +set_language(Req, State=#state{language_a=Language}) -> + Req2 = cowboy_req:set_resp_header(<<"content-language">>, Language, Req), + charsets_provided(cowboy_req:set_meta(language, Language, Req2), State). %% charsets_provided should return a list of binary values indicating %% which charsets are accepted by the resource. 
@@ -362,8 +397,8 @@ charsets_provided(Req, State) -> not_acceptable(Req2, State#state{handler_state=HandlerState}); {CP, Req2, HandlerState} -> State2 = State#state{handler_state=HandlerState, charsets_p=CP}, - {AcceptCharset, Req3} = - cowboy_http_req:parse_header('Accept-Charset', Req2), + {ok, AcceptCharset, Req3} = + cowboy_req:parse_header(<<"accept-charset">>, Req2), case AcceptCharset of undefined -> set_content_type(Req3, State2#state{charset_a=hd(CP)}); @@ -386,7 +421,11 @@ prioritize_charsets(AcceptCharsets) -> end, AcceptCharsets), case lists:keymember(<<"*">>, 1, AcceptCharsets2) of true -> AcceptCharsets2; - false -> [{<<"iso-8859-1">>, 1000}|AcceptCharsets2] + false -> + case lists:keymember(<<"iso-8859-1">>, 1, AcceptCharsets2) of + true -> AcceptCharsets2; + false -> [{<<"iso-8859-1">>, 1000}|AcceptCharsets2] + end end. choose_charset(Req, State, []) -> @@ -396,13 +435,12 @@ choose_charset(Req, State=#state{charsets_p=CP}, [Charset|Tail]) -> match_charset(Req, State, Accept, [], _Charset) -> choose_charset(Req, State, Accept); -match_charset(Req, State, _Accept, [Provided|_Tail], - {Provided, _Quality}) -> +match_charset(Req, State, _Accept, [Provided|_], {Provided, _}) -> set_content_type(Req, State#state{charset_a=Provided}); -match_charset(Req, State, Accept, [_Provided|Tail], Charset) -> +match_charset(Req, State, Accept, [_|Tail], Charset) -> match_charset(Req, State, Accept, Tail, Charset). -set_content_type(Req=#http_req{meta=Meta}, State=#state{ +set_content_type(Req, State=#state{ content_type_a={{Type, SubType, Params}, _Fun}, charset_a=Charset}) -> ParamsBin = set_content_type_build_params(Params, []), @@ -411,10 +449,11 @@ set_content_type(Req=#http_req{meta=Meta}, State=#state{ undefined -> ContentType; Charset -> [ContentType, <<"; charset=">>, Charset] end, - {ok, Req2} = cowboy_http_req:set_resp_header( - <<"Content-Type">>, ContentType2, Req), - encodings_provided(Req2#http_req{meta=[{charset, Charset}|Meta]}, State). 
+ Req2 = cowboy_req:set_resp_header(<<"content-type">>, ContentType2, Req), + encodings_provided(cowboy_req:set_meta(charset, Charset, Req2), State). +set_content_type_build_params('*', []) -> + <<>>; set_content_type_build_params([], []) -> <<>>; set_content_type_build_params([], Acc) -> @@ -443,120 +482,140 @@ variances(Req, State=#state{content_types_p=CTP, Variances = case CTP of [] -> []; [_] -> []; - [_|_] -> [<<"Accept">>] + [_|_] -> [<<"accept">>] end, Variances2 = case LP of [] -> Variances; [_] -> Variances; - [_|_] -> [<<"Accept-Language">>|Variances] + [_|_] -> [<<"accept-language">>|Variances] end, Variances3 = case CP of [] -> Variances2; [_] -> Variances2; - [_|_] -> [<<"Accept-Charset">>|Variances2] + [_|_] -> [<<"accept-charset">>|Variances2] end, - {Variances4, Req3, State2} = case call(Req, State, variances) of + try variances(Req, State, Variances3) of + {Variances4, Req2, State2} -> + case [[<<", ">>, V] || V <- Variances4] of + [] -> + resource_exists(Req2, State2); + [[<<", ">>, H]|Variances5] -> + Req3 = cowboy_req:set_resp_header( + <<"vary">>, [H|Variances5], Req2), + resource_exists(Req3, State2) + end + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, variances) + end. + +variances(Req, State, Variances) -> + case unsafe_call(Req, State, variances) of no_call -> - {Variances3, Req, State}; + {Variances, Req, State}; {HandlerVariances, Req2, HandlerState} -> - {Variances3 ++ HandlerVariances, Req2, + {Variances ++ HandlerVariances, Req2, State#state{handler_state=HandlerState}} - end, - case [[<<", ">>, V] || V <- Variances4] of - [] -> - resource_exists(Req3, State2); - [[<<", ">>, H]|Variances5] -> - {ok, Req4} = cowboy_http_req:set_resp_header( - <<"Variances">>, [H|Variances5], Req3), - resource_exists(Req4, State2) end. resource_exists(Req, State) -> expect(Req, State, resource_exists, true, - fun if_match_exists/2, fun if_match_musnt_exist/2). + fun if_match_exists/2, fun if_match_must_not_exist/2). 
if_match_exists(Req, State) -> - case cowboy_http_req:parse_header('If-Match', Req) of - {undefined, Req2} -> - if_unmodified_since_exists(Req2, State); - {'*', Req2} -> - if_unmodified_since_exists(Req2, State); - {ETagsList, Req2} -> - if_match(Req2, State, ETagsList) + State2 = State#state{exists=true}, + case cowboy_req:parse_header(<<"if-match">>, Req) of + {ok, undefined, Req2} -> + if_unmodified_since_exists(Req2, State2); + {ok, '*', Req2} -> + if_unmodified_since_exists(Req2, State2); + {ok, ETagsList, Req2} -> + if_match(Req2, State2, ETagsList); + {error, badarg} -> + respond(Req, State2, 400) end. if_match(Req, State, EtagsList) -> - {Etag, Req2, State2} = generate_etag(Req, State), - case Etag of - no_call -> - precondition_failed(Req2, State2); - Etag -> + try generate_etag(Req, State) of + {Etag, Req2, State2} -> case lists:member(Etag, EtagsList) of true -> if_unmodified_since_exists(Req2, State2); + %% Etag may be `undefined' which cannot be a member. false -> precondition_failed(Req2, State2) end + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, generate_etag) end. -if_match_musnt_exist(Req, State) -> - case cowboy_http_req:header('If-Match', Req) of +if_match_must_not_exist(Req, State) -> + case cowboy_req:header(<<"if-match">>, Req) of {undefined, Req2} -> is_put_to_missing_resource(Req2, State); {_Any, Req2} -> precondition_failed(Req2, State) end. if_unmodified_since_exists(Req, State) -> - case cowboy_http_req:parse_header('If-Unmodified-Since', Req) of - {undefined, Req2} -> - if_none_match_exists(Req2, State); - {{error, badarg}, Req2} -> + case cowboy_req:parse_header(<<"if-unmodified-since">>, Req) of + {ok, undefined, Req2} -> if_none_match_exists(Req2, State); - {IfUnmodifiedSince, Req2} -> - if_unmodified_since(Req2, State, IfUnmodifiedSince) + {ok, IfUnmodifiedSince, Req2} -> + if_unmodified_since(Req2, State, IfUnmodifiedSince); + {error, badarg} -> + if_none_match_exists(Req, State) end. 
%% If LastModified is the atom 'no_call', we continue. if_unmodified_since(Req, State, IfUnmodifiedSince) -> - {LastModified, Req2, State2} = last_modified(Req, State), - case LastModified > IfUnmodifiedSince of - true -> precondition_failed(Req2, State2); - false -> if_none_match_exists(Req2, State2) + try last_modified(Req, State) of + {LastModified, Req2, State2} -> + case LastModified > IfUnmodifiedSince of + true -> precondition_failed(Req2, State2); + false -> if_none_match_exists(Req2, State2) + end + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, last_modified) end. if_none_match_exists(Req, State) -> - case cowboy_http_req:parse_header('If-None-Match', Req) of - {undefined, Req2} -> + case cowboy_req:parse_header(<<"if-none-match">>, Req) of + {ok, undefined, Req2} -> if_modified_since_exists(Req2, State); - {'*', Req2} -> + {ok, '*', Req2} -> precondition_is_head_get(Req2, State); - {EtagsList, Req2} -> - if_none_match(Req2, State, EtagsList) + {ok, EtagsList, Req2} -> + if_none_match(Req2, State, EtagsList); + {error, badarg} -> + respond(Req, State, 400) end. if_none_match(Req, State, EtagsList) -> - {Etag, Req2, State2} = generate_etag(Req, State), - case Etag of - no_call -> - precondition_failed(Req2, State2); - Etag -> - case lists:member(Etag, EtagsList) of - true -> precondition_is_head_get(Req2, State2); - false -> if_modified_since_exists(Req2, State2) + try generate_etag(Req, State) of + {Etag, Req2, State2} -> + case Etag of + undefined -> + precondition_failed(Req2, State2); + Etag -> + case lists:member(Etag, EtagsList) of + true -> precondition_is_head_get(Req2, State2); + false -> if_modified_since_exists(Req2, State2) + end end + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, generate_etag) end. 
-precondition_is_head_get(Req=#http_req{method=Method}, State) - when Method =:= 'HEAD'; Method =:= 'GET' -> +precondition_is_head_get(Req, State=#state{method=Method}) + when Method =:= <<"HEAD">>; Method =:= <<"GET">> -> not_modified(Req, State); precondition_is_head_get(Req, State) -> precondition_failed(Req, State). if_modified_since_exists(Req, State) -> - case cowboy_http_req:parse_header('If-Modified-Since', Req) of - {undefined, Req2} -> + case cowboy_req:parse_header(<<"if-modified-since">>, Req) of + {ok, undefined, Req2} -> method(Req2, State); - {{error, badarg}, Req2} -> - method(Req2, State); - {IfModifiedSince, Req2} -> - if_modified_since_now(Req2, State, IfModifiedSince) + {ok, IfModifiedSince, Req2} -> + if_modified_since_now(Req2, State, IfModifiedSince); + {error, badarg} -> + method(Req, State) end. if_modified_since_now(Req, State, IfModifiedSince) -> @@ -566,28 +625,36 @@ if_modified_since_now(Req, State, IfModifiedSince) -> end. if_modified_since(Req, State, IfModifiedSince) -> - {LastModified, Req2, State2} = last_modified(Req, State), - case LastModified of - no_call -> + try last_modified(Req, State) of + {no_call, Req2, State2} -> method(Req2, State2); - LastModified -> + {LastModified, Req2, State2} -> case LastModified > IfModifiedSince of true -> method(Req2, State2); false -> not_modified(Req2, State2) end + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, last_modified) + end. + +not_modified(Req, State) -> + Req2 = cowboy_req:delete_resp_header(<<"content-type">>, Req), + try set_resp_etag(Req2, State) of + {Req3, State2} -> + try set_resp_expires(Req3, State2) of + {Req4, State3} -> + respond(Req4, State3, 304) + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, expires) + end + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, generate_etag) end. 
-not_modified(Req=#http_req{resp_headers=RespHeaders}, State) -> - RespHeaders2 = lists:keydelete(<<"Content-Type">>, 1, RespHeaders), - Req2 = Req#http_req{resp_headers=RespHeaders2}, - {Req3, State2} = set_resp_etag(Req2, State), - {Req4, State3} = set_resp_expires(Req3, State2), - respond(Req4, State3, 304). - precondition_failed(Req, State) -> respond(Req, State, 412). -is_put_to_missing_resource(Req=#http_req{method='PUT'}, State) -> +is_put_to_missing_resource(Req, State=#state{method= <<"PUT">>}) -> moved_permanently(Req, State, fun is_conflict/2); is_put_to_missing_resource(Req, State) -> previously_existed(Req, State). @@ -597,8 +664,8 @@ is_put_to_missing_resource(Req, State) -> moved_permanently(Req, State, OnFalse) -> case call(Req, State, moved_permanently) of {{true, Location}, Req2, HandlerState} -> - {ok, Req3} = cowboy_http_req:set_resp_header( - <<"Location">>, Location, Req2), + Req3 = cowboy_req:set_resp_header( + <<"location">>, Location, Req2), respond(Req3, State#state{handler_state=HandlerState}, 301); {false, Req2, HandlerState} -> OnFalse(Req2, State#state{handler_state=HandlerState}); @@ -618,8 +685,8 @@ previously_existed(Req, State) -> moved_temporarily(Req, State) -> case call(Req, State, moved_temporarily) of {{true, Location}, Req2, HandlerState} -> - {ok, Req3} = cowboy_http_req:set_resp_header( - <<"Location">>, Location, Req2), + Req3 = cowboy_req:set_resp_header( + <<"location">>, Location, Req2), respond(Req3, State#state{handler_state=HandlerState}, 307); {false, Req2, HandlerState} -> is_post_to_missing_resource(Req2, State#state{handler_state=HandlerState}, 410); @@ -629,174 +696,184 @@ moved_temporarily(Req, State) -> is_post_to_missing_resource(Req, State, 410) end. 
-is_post_to_missing_resource(Req=#http_req{method='POST'}, State, OnFalse) -> +is_post_to_missing_resource(Req, State=#state{method= <<"POST">>}, OnFalse) -> allow_missing_post(Req, State, OnFalse); is_post_to_missing_resource(Req, State, OnFalse) -> respond(Req, State, OnFalse). allow_missing_post(Req, State, OnFalse) -> - expect(Req, State, allow_missing_post, true, fun post_is_create/2, OnFalse). + expect(Req, State, allow_missing_post, true, fun accept_resource/2, OnFalse). -method(Req=#http_req{method='DELETE'}, State) -> +method(Req, State=#state{method= <<"DELETE">>}) -> delete_resource(Req, State); -method(Req=#http_req{method='POST'}, State) -> - post_is_create(Req, State); -method(Req=#http_req{method='PUT'}, State) -> +method(Req, State=#state{method= <<"PUT">>}) -> is_conflict(Req, State); +method(Req, State=#state{method=Method}) + when Method =:= <<"POST">>; Method =:= <<"PATCH">> -> + accept_resource(Req, State); +method(Req, State=#state{method=Method}) + when Method =:= <<"GET">>; Method =:= <<"HEAD">> -> + set_resp_body_etag(Req, State); method(Req, State) -> - set_resp_body(Req, State). + multiple_choices(Req, State). %% delete_resource/2 should start deleting the resource and return. delete_resource(Req, State) -> - expect(Req, State, delete_resource, true, fun delete_completed/2, 500). + expect(Req, State, delete_resource, false, 500, fun delete_completed/2). %% delete_completed/2 indicates whether the resource has been deleted yet. delete_completed(Req, State) -> expect(Req, State, delete_completed, true, fun has_resp_body/2, 202). -%% post_is_create/2 indicates whether the POST method can create new resources. -post_is_create(Req, State) -> - expect(Req, State, post_is_create, false, fun process_post/2, fun create_path/2). - -%% When the POST method can create new resources, create_path/2 will be called -%% and is expected to return the full path to the new resource -%% (including the leading /). 
-create_path(Req=#http_req{meta=Meta}, State) -> - case call(Req, State, create_path) of - {halt, Req2, HandlerState} -> - terminate(Req2, State#state{handler_state=HandlerState}); - {Path, Req2, HandlerState} -> - Location = create_path_location(Req2, Path), - State2 = State#state{handler_state=HandlerState}, - {ok, Req3} = cowboy_http_req:set_resp_header( - <<"Location">>, Location, Req2), - put_resource(Req3#http_req{meta=[{put_path, Path}|Meta]}, - State2, 303) - end. - -create_path_location(#http_req{transport=Transport, raw_host=Host, - port=Port}, Path) -> - TransportName = Transport:name(), - << (create_path_location_protocol(TransportName))/binary, "://", - Host/binary, (create_path_location_port(TransportName, Port))/binary, - Path/binary >>. - -create_path_location_protocol(ssl) -> <<"https">>; -create_path_location_protocol(_) -> <<"http">>. - -create_path_location_port(ssl, 443) -> - <<>>; -create_path_location_port(tcp, 80) -> - <<>>; -create_path_location_port(_, Port) -> - <<":", (list_to_binary(integer_to_list(Port)))/binary>>. - -%% process_post should return true when the POST body could be processed -%% and false when it hasn't, in which case a 500 error is sent. -process_post(Req, State) -> - case call(Req, State, process_post) of - {halt, Req2, HandlerState} -> - terminate(Req2, State#state{handler_state=HandlerState}); - {true, Req2, HandlerState} -> - State2 = State#state{handler_state=HandlerState}, - next(Req2, State2, 201); - {false, Req2, HandlerState} -> - State2 = State#state{handler_state=HandlerState}, - respond(Req2, State2, 500) - end. - is_conflict(Req, State) -> - expect(Req, State, is_conflict, false, fun put_resource/2, 409). - -put_resource(Req=#http_req{raw_path=RawPath, meta=Meta}, State) -> - Req2 = Req#http_req{meta=[{put_path, RawPath}|Meta]}, - put_resource(Req2, State, fun is_new_resource/2). + expect(Req, State, is_conflict, false, fun accept_resource/2, 409). 
%% content_types_accepted should return a list of media types and their %% associated callback functions in the same format as content_types_provided. %% %% The callback will then be called and is expected to process the content -%% pushed to the resource in the request body. The path to the new resource -%% may be different from the request path, and is stored as request metadata. -%% It is always defined past this point. It can be retrieved as demonstrated: -%% {PutPath, Req2} = cowboy_http_req:meta(put_path, Req) -put_resource(Req, State, OnTrue) -> +%% pushed to the resource in the request body. +%% +%% content_types_accepted SHOULD return a different list +%% for each HTTP method. +accept_resource(Req, State) -> case call(Req, State, content_types_accepted) of no_call -> respond(Req, State, 415); {halt, Req2, HandlerState} -> terminate(Req2, State#state{handler_state=HandlerState}); {CTA, Req2, HandlerState} -> + CTA2 = [normalize_content_types(P) || P <- CTA], State2 = State#state{handler_state=HandlerState}, - {ContentType, Req3} - = cowboy_http_req:parse_header('Content-Type', Req2), - choose_content_type(Req3, State2, OnTrue, ContentType, CTA) + case cowboy_req:parse_header(<<"content-type">>, Req2) of + {ok, ContentType, Req3} -> + choose_content_type(Req3, State2, ContentType, CTA2); + {error, badarg} -> + respond(Req2, State2, 415) + end end. -choose_content_type(Req, State, _OnTrue, _ContentType, []) -> +%% The special content type '*' will always match. It can be used as a +%% catch-all content type for accepting any kind of request content. +%% Note that because it will always match, it should be the last of the +%% list of content types, otherwise it'll shadow the ones following. 
+choose_content_type(Req, State, _ContentType, []) -> respond(Req, State, 415); -choose_content_type(Req, State, OnTrue, ContentType, - [{Accepted, Fun}|_Tail]) when ContentType =:= Accepted -> - case call(Req, State, Fun) of - {halt, Req2, HandlerState} -> - terminate(Req2, State#state{handler_state=HandlerState}); - {true, Req2, HandlerState} -> - State2 = State#state{handler_state=HandlerState}, - next(Req2, State2, OnTrue); - {false, Req2, HandlerState} -> - State2 = State#state{handler_state=HandlerState}, - respond(Req2, State2, 500) - end; -choose_content_type(Req, State, OnTrue, ContentType, [_Any|Tail]) -> - choose_content_type(Req, State, OnTrue, ContentType, Tail). - -%% Whether we created a new resource, either through PUT or POST. -%% This is easily testable because we would have set the Location -%% header by this point if we did so. -is_new_resource(Req, State) -> - case cowboy_http_req:has_resp_header(<<"Location">>, Req) of +choose_content_type(Req, State, ContentType, [{Accepted, Fun}|_Tail]) + when Accepted =:= '*'; Accepted =:= ContentType -> + process_content_type(Req, State, Fun); +%% The special parameter '*' will always match any kind of content type +%% parameters. +%% Note that because it will always match, it should be the last of the +%% list for specific content type, otherwise it'll shadow the ones following. +choose_content_type(Req, State, {Type, SubType, Param}, + [{{Type, SubType, AcceptedParam}, Fun}|_Tail]) + when AcceptedParam =:= '*'; AcceptedParam =:= Param -> + process_content_type(Req, State, Fun); +choose_content_type(Req, State, ContentType, [_Any|Tail]) -> + choose_content_type(Req, State, ContentType, Tail). 
+ +process_content_type(Req, State=#state{method=Method, exists=Exists}, Fun) -> + try case call(Req, State, Fun) of + {halt, Req2, HandlerState2} -> + terminate(Req2, State#state{handler_state=HandlerState2}); + {true, Req2, HandlerState2} when Exists -> + State2 = State#state{handler_state=HandlerState2}, + next(Req2, State2, fun has_resp_body/2); + {true, Req2, HandlerState2} -> + State2 = State#state{handler_state=HandlerState2}, + next(Req2, State2, fun maybe_created/2); + {false, Req2, HandlerState2} -> + State2 = State#state{handler_state=HandlerState2}, + respond(Req2, State2, 400); + {{true, ResURL}, Req2, HandlerState2} when Method =:= <<"POST">> -> + State2 = State#state{handler_state=HandlerState2}, + Req3 = cowboy_req:set_resp_header( + <<"location">>, ResURL, Req2), + if + Exists -> respond(Req3, State2, 303); + true -> respond(Req3, State2, 201) + end + end catch Class:Reason = {case_clause, no_call} -> + error_terminate(Req, State, Class, Reason, Fun) + end. + +%% If PUT was used then the resource has been created at the current URL. +%% Otherwise, if a location header has been set then the resource has been +%% created at a new URL. If not, send a 200 or 204 as expected from a +%% POST or PATCH request. +maybe_created(Req, State=#state{method= <<"PUT">>}) -> + respond(Req, State, 201); +maybe_created(Req, State) -> + case cowboy_req:has_resp_header(<<"location">>, Req) of true -> respond(Req, State, 201); false -> has_resp_body(Req, State) end. has_resp_body(Req, State) -> - case cowboy_http_req:has_resp_body(Req) of + case cowboy_req:has_resp_body(Req) of true -> multiple_choices(Req, State); false -> respond(Req, State, 204) end. +%% Set the Etag header if any for the response provided. +set_resp_body_etag(Req, State) -> + try set_resp_etag(Req, State) of + {Req2, State2} -> + set_resp_body_last_modified(Req2, State2) + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, generate_etag) + end. 
+ +%% Set the Last-Modified header if any for the response provided. +set_resp_body_last_modified(Req, State) -> + try last_modified(Req, State) of + {LastModified, Req2, State2} -> + case LastModified of + LastModified when is_atom(LastModified) -> + set_resp_body_expires(Req2, State2); + LastModified -> + LastModifiedBin = cowboy_clock:rfc1123(LastModified), + Req3 = cowboy_req:set_resp_header( + <<"last-modified">>, LastModifiedBin, Req2), + set_resp_body_expires(Req3, State2) + end + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, last_modified) + end. + +%% Set the Expires header if any for the response provided. +set_resp_body_expires(Req, State) -> + try set_resp_expires(Req, State) of + {Req2, State2} -> + set_resp_body(Req2, State2) + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, expires) + end. + %% Set the response headers and call the callback found using %% content_types_provided/2 to obtain the request body and add %% it to the response. 
-set_resp_body(Req=#http_req{method=Method}, - State=#state{content_type_a={_Type, Fun}}) - when Method =:= 'GET'; Method =:= 'HEAD' -> - {Req2, State2} = set_resp_etag(Req, State), - {LastModified, Req3, State3} = last_modified(Req2, State2), - case LastModified of - LastModified when is_atom(LastModified) -> - Req4 = Req3; - LastModified -> - LastModifiedStr = httpd_util:rfc1123_date(LastModified), - {ok, Req4} = cowboy_http_req:set_resp_header( - <<"Last-Modified">>, LastModifiedStr, Req3) - end, - {Req5, State4} = set_resp_expires(Req4, State3), - case call(Req5, State4, Fun) of - {halt, Req6, HandlerState} -> - terminate(Req6, State4#state{handler_state=HandlerState}); - {Body, Req6, HandlerState} -> - State5 = State4#state{handler_state=HandlerState}, - {ok, Req7} = case Body of - {stream, Len, Fun1} -> - cowboy_http_req:set_resp_body_fun(Len, Fun1, Req6); +set_resp_body(Req, State=#state{content_type_a={_, Callback}}) -> + try case call(Req, State, Callback) of + {halt, Req2, HandlerState2} -> + terminate(Req2, State#state{handler_state=HandlerState2}); + {Body, Req2, HandlerState2} -> + State2 = State#state{handler_state=HandlerState2}, + Req3 = case Body of + {stream, StreamFun} -> + cowboy_req:set_resp_body_fun(StreamFun, Req2); + {stream, Len, StreamFun} -> + cowboy_req:set_resp_body_fun(Len, StreamFun, Req2); + {chunked, StreamFun} -> + cowboy_req:set_resp_body_fun(chunked, StreamFun, Req2); _Contents -> - cowboy_http_req:set_resp_body(Body, Req6) + cowboy_req:set_resp_body(Body, Req2) end, - multiple_choices(Req7, State5) - end; -set_resp_body(Req, State) -> - multiple_choices(Req, State). + multiple_choices(Req3, State2) + end catch Class:Reason = {case_clause, no_call} -> + error_terminate(Req, State, Class, Reason, Callback) + end. multiple_choices(Req, State) -> expect(Req, State, multiple_choices, false, 200, 300). 
@@ -809,20 +886,28 @@ set_resp_etag(Req, State) -> undefined -> {Req2, State2}; Etag -> - {ok, Req3} = cowboy_http_req:set_resp_header( - <<"Etag">>, Etag, Req2), + Req3 = cowboy_req:set_resp_header( + <<"etag">>, encode_etag(Etag), Req2), {Req3, State2} end. +-spec encode_etag({strong | weak, binary()}) -> iolist(). +encode_etag({strong, Etag}) -> [$",Etag,$"]; +encode_etag({weak, Etag}) -> ["W/\"",Etag,$"]. + set_resp_expires(Req, State) -> {Expires, Req2, State2} = expires(Req, State), case Expires of Expires when is_atom(Expires) -> {Req2, State2}; + Expires when is_binary(Expires) -> + Req3 = cowboy_req:set_resp_header( + <<"expires">>, Expires, Req2), + {Req3, State2}; Expires -> - ExpiresStr = httpd_util:rfc1123_date(Expires), - {ok, Req3} = cowboy_http_req:set_resp_header( - <<"Expires">>, ExpiresStr, Req2), + ExpiresBin = cowboy_clock:rfc1123(Expires), + Req3 = cowboy_req:set_resp_header( + <<"expires">>, ExpiresBin, Req2), {Req3, State2} end. @@ -831,9 +916,12 @@ set_resp_expires(Req, State) -> generate_etag(Req, State=#state{etag=no_call}) -> {undefined, Req, State}; generate_etag(Req, State=#state{etag=undefined}) -> - case call(Req, State, generate_etag) of + case unsafe_call(Req, State, generate_etag) of no_call -> {undefined, Req, State#state{etag=no_call}}; + {Etag, Req2, HandlerState} when is_binary(Etag) -> + [Etag2] = cowboy_http:entity_tag_match(Etag), + {Etag2, Req2, State#state{handler_state=HandlerState, etag=Etag2}}; {Etag, Req2, HandlerState} -> {Etag, Req2, State#state{handler_state=HandlerState, etag=Etag}} end; @@ -843,7 +931,7 @@ generate_etag(Req, State=#state{etag=Etag}) -> last_modified(Req, State=#state{last_modified=no_call}) -> {undefined, Req, State}; last_modified(Req, State=#state{last_modified=undefined}) -> - case call(Req, State, last_modified) of + case unsafe_call(Req, State, last_modified) of no_call -> {undefined, Req, State#state{last_modified=no_call}}; {LastModified, Req2, HandlerState} -> @@ -856,7 +944,7 @@ 
last_modified(Req, State=#state{last_modified=LastModified}) -> expires(Req, State=#state{expires=no_call}) -> {undefined, Req, State}; expires(Req, State=#state{expires=undefined}) -> - case call(Req, State, expires) of + case unsafe_call(Req, State, expires) of no_call -> {undefined, Req, State#state{expires=no_call}}; {Expires, Req2, HandlerState} -> @@ -880,9 +968,23 @@ expect(Req, State, Callback, Expected, OnTrue, OnFalse) -> next(Req2, State#state{handler_state=HandlerState}, OnFalse) end. -call(Req, #state{handler=Handler, handler_state=HandlerState}, Fun) -> - case erlang:function_exported(Handler, Fun, 2) of - true -> Handler:Fun(Req, HandlerState); +call(Req, State=#state{handler=Handler, handler_state=HandlerState}, + Callback) -> + case erlang:function_exported(Handler, Callback, 2) of + true -> + try + Handler:Callback(Req, HandlerState) + catch Class:Reason -> + error_terminate(Req, State, Class, Reason, Callback) + end; + false -> + no_call + end. + +unsafe_call(Req, #state{handler=Handler, handler_state=HandlerState}, + Callback) -> + case erlang:function_exported(Handler, Callback, 2) of + true -> Handler:Callback(Req, HandlerState); false -> no_call end. @@ -891,15 +993,30 @@ next(Req, State, Next) when is_function(Next) -> next(Req, State, StatusCode) when is_integer(StatusCode) -> respond(Req, State, StatusCode). -%% @todo Allow some sort of callback for custom error pages. respond(Req, State, StatusCode) -> - {ok, Req2} = cowboy_http_req:reply(StatusCode, Req), + {ok, Req2} = cowboy_req:reply(StatusCode, Req), terminate(Req2, State). -terminate(Req, #state{handler=Handler, handler_state=HandlerState}) -> +terminate(Req, State=#state{env=Env}) -> + rest_terminate(Req, State), + {ok, Req, [{result, ok}|Env]}. 
+ +error_terminate(Req, State=#state{handler=Handler, handler_state=HandlerState}, + Class, Reason, Callback) -> + Stacktrace = erlang:get_stacktrace(), + rest_terminate(Req, State), + cowboy_req:maybe_reply(Stacktrace, Req), + erlang:Class([ + {reason, Reason}, + {mfa, {Handler, Callback, 2}}, + {stacktrace, Stacktrace}, + {req, cowboy_req:to_list(Req)}, + {state, HandlerState} + ]). + +rest_terminate(Req, #state{handler=Handler, handler_state=HandlerState}) -> case erlang:function_exported(Handler, rest_terminate, 2) of true -> ok = Handler:rest_terminate( - Req#http_req{resp_state=locked}, HandlerState); + cowboy_req:lock(Req), HandlerState); false -> ok - end, - {ok, Req}. + end. diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_router.erl b/rabbitmq-server/deps/cowboy/src/cowboy_router.erl new file mode 100644 index 0000000..ef91c6d --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_router.erl @@ -0,0 +1,572 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% Routing middleware. +%% +%% Resolve the handler to be used for the request based on the +%% routing information found in the dispatch environment value. 
+%% When found, the handler module and associated data are added to +%% the environment as the handler and handler_opts values +%% respectively. +%% +%% If the route cannot be found, processing stops with either +%% a 400 or a 404 reply. +-module(cowboy_router). +-behaviour(cowboy_middleware). + +-export([compile/1]). +-export([execute/2]). + +-type bindings() :: [{atom(), binary()}]. +-type tokens() :: [binary()]. +-export_type([bindings/0]). +-export_type([tokens/0]). + +-type constraints() :: [{atom(), int} + | {atom(), function, fun ((binary()) -> true | {true, any()} | false)}]. +-export_type([constraints/0]). + +-type route_match() :: '_' | iodata(). +-type route_path() :: {Path::route_match(), Handler::module(), Opts::any()} + | {Path::route_match(), constraints(), Handler::module(), Opts::any()}. +-type route_rule() :: {Host::route_match(), Paths::[route_path()]} + | {Host::route_match(), constraints(), Paths::[route_path()]}. +-type routes() :: [route_rule()]. +-export_type([routes/0]). + +-type dispatch_match() :: '_' | <<_:8>> | [binary() | '_' | '...' | atom()]. +-type dispatch_path() :: {dispatch_match(), module(), any()}. +-type dispatch_rule() :: {Host::dispatch_match(), Paths::[dispatch_path()]}. +-opaque dispatch_rules() :: [dispatch_rule()]. +-export_type([dispatch_rules/0]). + +-spec compile(routes()) -> dispatch_rules(). +compile(Routes) -> + compile(Routes, []). + +compile([], Acc) -> + lists:reverse(Acc); +compile([{Host, Paths}|Tail], Acc) -> + compile([{Host, [], Paths}|Tail], Acc); +compile([{HostMatch, Constraints, Paths}|Tail], Acc) -> + HostRules = case HostMatch of + '_' -> '_'; + _ -> compile_host(HostMatch) + end, + PathRules = compile_paths(Paths, []), + Hosts = case HostRules of + '_' -> [{'_', Constraints, PathRules}]; + _ -> [{R, Constraints, PathRules} || R <- HostRules] + end, + compile(Tail, Hosts ++ Acc). 
+ +compile_host(HostMatch) when is_list(HostMatch) -> + compile_host(list_to_binary(HostMatch)); +compile_host(HostMatch) when is_binary(HostMatch) -> + compile_rules(HostMatch, $., [], [], <<>>). + +compile_paths([], Acc) -> + lists:reverse(Acc); +compile_paths([{PathMatch, Handler, Opts}|Tail], Acc) -> + compile_paths([{PathMatch, [], Handler, Opts}|Tail], Acc); +compile_paths([{PathMatch, Constraints, Handler, Opts}|Tail], Acc) + when is_list(PathMatch) -> + compile_paths([{iolist_to_binary(PathMatch), + Constraints, Handler, Opts}|Tail], Acc); +compile_paths([{'_', Constraints, Handler, Opts}|Tail], Acc) -> + compile_paths(Tail, [{'_', Constraints, Handler, Opts}] ++ Acc); +compile_paths([{<< $/, PathMatch/binary >>, Constraints, Handler, Opts}|Tail], + Acc) -> + PathRules = compile_rules(PathMatch, $/, [], [], <<>>), + Paths = [{lists:reverse(R), Constraints, Handler, Opts} || R <- PathRules], + compile_paths(Tail, Paths ++ Acc); +compile_paths([{PathMatch, _, _, _}|_], _) -> + error({badarg, "The following route MUST begin with a slash: " + ++ binary_to_list(PathMatch)}). 
+ +compile_rules(<<>>, _, Segments, Rules, <<>>) -> + [Segments|Rules]; +compile_rules(<<>>, _, Segments, Rules, Acc) -> + [[Acc|Segments]|Rules]; +compile_rules(<< S, Rest/binary >>, S, Segments, Rules, <<>>) -> + compile_rules(Rest, S, Segments, Rules, <<>>); +compile_rules(<< S, Rest/binary >>, S, Segments, Rules, Acc) -> + compile_rules(Rest, S, [Acc|Segments], Rules, <<>>); +compile_rules(<< $:, Rest/binary >>, S, Segments, Rules, <<>>) -> + {NameBin, Rest2} = compile_binding(Rest, S, <<>>), + Name = binary_to_atom(NameBin, utf8), + compile_rules(Rest2, S, Segments, Rules, Name); +compile_rules(<< $:, _/binary >>, _, _, _, _) -> + erlang:error(badarg); +compile_rules(<< $[, $., $., $., $], Rest/binary >>, S, Segments, Rules, Acc) + when Acc =:= <<>> -> + compile_rules(Rest, S, ['...'|Segments], Rules, Acc); +compile_rules(<< $[, $., $., $., $], Rest/binary >>, S, Segments, Rules, Acc) -> + compile_rules(Rest, S, ['...', Acc|Segments], Rules, Acc); +compile_rules(<< $[, S, Rest/binary >>, S, Segments, Rules, Acc) -> + compile_brackets(Rest, S, [Acc|Segments], Rules); +compile_rules(<< $[, Rest/binary >>, S, Segments, Rules, <<>>) -> + compile_brackets(Rest, S, Segments, Rules); +%% Open bracket in the middle of a segment. +compile_rules(<< $[, _/binary >>, _, _, _, _) -> + erlang:error(badarg); +%% Missing an open bracket. +compile_rules(<< $], _/binary >>, _, _, _, _) -> + erlang:error(badarg); +compile_rules(<< C, Rest/binary >>, S, Segments, Rules, Acc) -> + compile_rules(Rest, S, Segments, Rules, << Acc/binary, C >>). + +%% Everything past $: until the segment separator ($. for hosts, +%% $/ for paths) or $[ or $] or end of binary is the binding name. 
+compile_binding(<<>>, _, <<>>) -> + erlang:error(badarg); +compile_binding(Rest = <<>>, _, Acc) -> + {Acc, Rest}; +compile_binding(Rest = << C, _/binary >>, S, Acc) + when C =:= S; C =:= $[; C =:= $] -> + {Acc, Rest}; +compile_binding(<< C, Rest/binary >>, S, Acc) -> + compile_binding(Rest, S, << Acc/binary, C >>). + +compile_brackets(Rest, S, Segments, Rules) -> + {Bracket, Rest2} = compile_brackets_split(Rest, <<>>, 0), + Rules1 = compile_rules(Rest2, S, Segments, [], <<>>), + Rules2 = compile_rules(<< Bracket/binary, Rest2/binary >>, + S, Segments, [], <<>>), + Rules ++ Rules2 ++ Rules1. + +%% Missing a close bracket. +compile_brackets_split(<<>>, _, _) -> + erlang:error(badarg); +%% Make sure we don't confuse the closing bracket we're looking for. +compile_brackets_split(<< C, Rest/binary >>, Acc, N) when C =:= $[ -> + compile_brackets_split(Rest, << Acc/binary, C >>, N + 1); +compile_brackets_split(<< C, Rest/binary >>, Acc, N) when C =:= $], N > 0 -> + compile_brackets_split(Rest, << Acc/binary, C >>, N - 1); +%% That's the right one. +compile_brackets_split(<< $], Rest/binary >>, Acc, 0) -> + {Acc, Rest}; +compile_brackets_split(<< C, Rest/binary >>, Acc, N) -> + compile_brackets_split(Rest, << Acc/binary, C >>, N). + +-spec execute(Req, Env) + -> {ok, Req, Env} | {error, 400 | 404, Req} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +execute(Req, Env) -> + {_, Dispatch} = lists:keyfind(dispatch, 1, Env), + [Host, Path] = cowboy_req:get([host, path], Req), + case match(Dispatch, Host, Path) of + {ok, Handler, HandlerOpts, Bindings, HostInfo, PathInfo} -> + Req2 = cowboy_req:set_bindings(HostInfo, PathInfo, Bindings, Req), + {ok, Req2, [{handler, Handler}, {handler_opts, HandlerOpts}|Env]}; + {error, notfound, host} -> + {error, 400, Req}; + {error, badrequest, path} -> + {error, 400, Req}; + {error, notfound, path} -> + {error, 404, Req} + end. + +%% Internal. + +%% Match hostname tokens and path tokens against dispatch rules. 
+%% +%% It is typically used for matching tokens for the hostname and path of +%% the request against a global dispatch rule for your listener. +%% +%% Dispatch rules are a list of {Hostname, PathRules} tuples, with +%% PathRules being a list of {Path, HandlerMod, HandlerOpts}. +%% +%% Hostname and Path are match rules and can be either the +%% atom '_', which matches everything, `<<"*">>', which match the +%% wildcard path, or a list of tokens. +%% +%% Each token can be either a binary, the atom '_', +%% the atom '...' or a named atom. A binary token must match exactly, +%% '_' matches everything for a single token, '...' matches +%% everything for the rest of the tokens and a named atom will bind the +%% corresponding token value and return it. +%% +%% The list of hostname tokens is reversed before matching. For example, if +%% we were to match "www.ninenines.eu", we would first match "eu", then +%% "ninenines", then "www". This means that in the context of hostnames, +%% the '...' atom matches properly the lower levels of the domain +%% as would be expected. +%% +%% When a result is found, this function will return the handler module and +%% options found in the dispatch list, a key-value list of bindings and +%% the tokens that were matched by the '...' atom for both the +%% hostname and path. +-spec match(dispatch_rules(), Host::binary() | tokens(), Path::binary()) + -> {ok, module(), any(), bindings(), + HostInfo::undefined | tokens(), + PathInfo::undefined | tokens()} + | {error, notfound, host} | {error, notfound, path} + | {error, badrequest, path}. +match([], _, _) -> + {error, notfound, host}; +%% If the host is '_' then there can be no constraints. 
+match([{'_', [], PathMatchs}|_Tail], _, Path) -> + match_path(PathMatchs, undefined, Path, []); +match([{HostMatch, Constraints, PathMatchs}|Tail], Tokens, Path) + when is_list(Tokens) -> + case list_match(Tokens, HostMatch, []) of + false -> + match(Tail, Tokens, Path); + {true, Bindings, HostInfo} -> + HostInfo2 = case HostInfo of + undefined -> undefined; + _ -> lists:reverse(HostInfo) + end, + case check_constraints(Constraints, Bindings) of + {ok, Bindings2} -> + match_path(PathMatchs, HostInfo2, Path, Bindings2); + nomatch -> + match(Tail, Tokens, Path) + end + end; +match(Dispatch, Host, Path) -> + match(Dispatch, split_host(Host), Path). + +-spec match_path([dispatch_path()], + HostInfo::undefined | tokens(), binary() | tokens(), bindings()) + -> {ok, module(), any(), bindings(), + HostInfo::undefined | tokens(), + PathInfo::undefined | tokens()} + | {error, notfound, path} | {error, badrequest, path}. +match_path([], _, _, _) -> + {error, notfound, path}; +%% If the path is '_' then there can be no constraints. +match_path([{'_', [], Handler, Opts}|_Tail], HostInfo, _, Bindings) -> + {ok, Handler, Opts, Bindings, HostInfo, undefined}; +match_path([{<<"*">>, _Constraints, Handler, Opts}|_Tail], HostInfo, <<"*">>, Bindings) -> + {ok, Handler, Opts, Bindings, HostInfo, undefined}; +match_path([{PathMatch, Constraints, Handler, Opts}|Tail], HostInfo, Tokens, + Bindings) when is_list(Tokens) -> + case list_match(Tokens, PathMatch, Bindings) of + false -> + match_path(Tail, HostInfo, Tokens, Bindings); + {true, PathBinds, PathInfo} -> + case check_constraints(Constraints, PathBinds) of + {ok, PathBinds2} -> + {ok, Handler, Opts, PathBinds2, HostInfo, PathInfo}; + nomatch -> + match_path(Tail, HostInfo, Tokens, Bindings) + end + end; +match_path(_Dispatch, _HostInfo, badrequest, _Bindings) -> + {error, badrequest, path}; +match_path(Dispatch, HostInfo, Path, Bindings) -> + match_path(Dispatch, HostInfo, split_path(Path), Bindings). 
+ +check_constraints([], Bindings) -> + {ok, Bindings}; +check_constraints([Constraint|Tail], Bindings) -> + Name = element(1, Constraint), + case lists:keyfind(Name, 1, Bindings) of + false -> + check_constraints(Tail, Bindings); + {_, Value} -> + case check_constraint(Constraint, Value) of + true -> + check_constraints(Tail, Bindings); + {true, Value2} -> + Bindings2 = lists:keyreplace(Name, 1, Bindings, + {Name, Value2}), + check_constraints(Tail, Bindings2); + false -> + nomatch + end + end. + +check_constraint({_, int}, Value) -> + try {true, list_to_integer(binary_to_list(Value))} + catch _:_ -> false + end; +check_constraint({_, function, Fun}, Value) -> + Fun(Value). + +-spec split_host(binary()) -> tokens(). +split_host(Host) -> + split_host(Host, []). + +split_host(Host, Acc) -> + case binary:match(Host, <<".">>) of + nomatch when Host =:= <<>> -> + Acc; + nomatch -> + [Host|Acc]; + {Pos, _} -> + << Segment:Pos/binary, _:8, Rest/bits >> = Host, + false = byte_size(Segment) == 0, + split_host(Rest, [Segment|Acc]) + end. + +%% Following RFC2396, this function may return path segments containing any +%% character, including / if, and only if, a / was escaped +%% and part of a path segment. +-spec split_path(binary()) -> tokens(). +split_path(<< $/, Path/bits >>) -> + split_path(Path, []); +split_path(_) -> + badrequest. + +split_path(Path, Acc) -> + try + case binary:match(Path, <<"/">>) of + nomatch when Path =:= <<>> -> + lists:reverse([cow_qs:urldecode(S) || S <- Acc]); + nomatch -> + lists:reverse([cow_qs:urldecode(S) || S <- [Path|Acc]]); + {Pos, _} -> + << Segment:Pos/binary, _:8, Rest/bits >> = Path, + split_path(Rest, [Segment|Acc]) + end + catch + error:badarg -> + badrequest + end. + +-spec list_match(tokens(), dispatch_match(), bindings()) + -> {true, bindings(), undefined | tokens()} | false. +%% Atom '...' matches any trailing path, stop right now. 
+list_match(List, ['...'], Binds) -> + {true, Binds, List}; +%% Atom '_' matches anything, continue. +list_match([_E|Tail], ['_'|TailMatch], Binds) -> + list_match(Tail, TailMatch, Binds); +%% Both values match, continue. +list_match([E|Tail], [E|TailMatch], Binds) -> + list_match(Tail, TailMatch, Binds); +%% Bind E to the variable name V and continue, +%% unless V was already defined and E isn't identical to the previous value. +list_match([E|Tail], [V|TailMatch], Binds) when is_atom(V) -> + case lists:keyfind(V, 1, Binds) of + {_, E} -> + list_match(Tail, TailMatch, Binds); + {_, _} -> + false; + false -> + list_match(Tail, TailMatch, [{V, E}|Binds]) + end; +%% Match complete. +list_match([], [], Binds) -> + {true, Binds, undefined}; +%% Values don't match, stop. +list_match(_List, _Match, _Binds) -> + false. + +%% Tests. + +-ifdef(TEST). +compile_test_() -> + Tests = [ + %% Match any host and path. + {[{'_', [{'_', h, o}]}], + [{'_', [], [{'_', [], h, o}]}]}, + {[{"cowboy.example.org", + [{"/", ha, oa}, {"/path/to/resource", hb, ob}]}], + [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [ + {[], [], ha, oa}, + {[<<"path">>, <<"to">>, <<"resource">>], [], hb, ob}]}]}, + {[{'_', [{"/path/to/resource/", h, o}]}], + [{'_', [], [{[<<"path">>, <<"to">>, <<"resource">>], [], h, o}]}]}, + % Cyrillic from a latin1 encoded file. + {[{'_', [{[47,208,191,209,131,209,130,209,140,47,208,186,47,209,128, + 208,181,209,129,209,131,209,128,209,129,209,131,47], h, o}]}], + [{'_', [], [{[<<208,191,209,131,209,130,209,140>>, <<208,186>>, + <<209,128,208,181,209,129,209,131,209,128,209,129,209,131>>], + [], h, o}]}]}, + {[{"cowboy.example.org.", [{'_', h, o}]}], + [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [{'_', [], h, o}]}]}, + {[{".cowboy.example.org", [{'_', h, o}]}], + [{[<<"org">>, <<"example">>, <<"cowboy">>], [], [{'_', [], h, o}]}]}, + % Cyrillic from a latin1 encoded file. 
+ {[{[208,189,208,181,208,186,208,184,208,185,46,209,129,208,176, + 208,185,209,130,46,209,128,209,132,46], [{'_', h, o}]}], + [{[<<209,128,209,132>>, <<209,129,208,176,208,185,209,130>>, + <<208,189,208,181,208,186,208,184,208,185>>], + [], [{'_', [], h, o}]}]}, + {[{":subdomain.example.org", [{"/hats/:name/prices", h, o}]}], + [{[<<"org">>, <<"example">>, subdomain], [], [ + {[<<"hats">>, name, <<"prices">>], [], h, o}]}]}, + {[{"ninenines.:_", [{"/hats/:_", h, o}]}], + [{['_', <<"ninenines">>], [], [{[<<"hats">>, '_'], [], h, o}]}]}, + {[{"[www.]ninenines.eu", + [{"/horses", h, o}, {"/hats/[page/:number]", h, o}]}], [ + {[<<"eu">>, <<"ninenines">>], [], [ + {[<<"horses">>], [], h, o}, + {[<<"hats">>], [], h, o}, + {[<<"hats">>, <<"page">>, number], [], h, o}]}, + {[<<"eu">>, <<"ninenines">>, <<"www">>], [], [ + {[<<"horses">>], [], h, o}, + {[<<"hats">>], [], h, o}, + {[<<"hats">>, <<"page">>, number], [], h, o}]}]}, + {[{'_', [{"/hats/[page/[:number]]", h, o}]}], [{'_', [], [ + {[<<"hats">>], [], h, o}, + {[<<"hats">>, <<"page">>], [], h, o}, + {[<<"hats">>, <<"page">>, number], [], h, o}]}]}, + {[{"[...]ninenines.eu", [{"/hats/[...]", h, o}]}], + [{[<<"eu">>, <<"ninenines">>, '...'], [], [ + {[<<"hats">>, '...'], [], h, o}]}]} + ], + [{lists:flatten(io_lib:format("~p", [Rt])), + fun() -> Rs = compile(Rt) end} || {Rt, Rs} <- Tests]. + +split_host_test_() -> + Tests = [ + {<<"">>, []}, + {<<"*">>, [<<"*">>]}, + {<<"cowboy.ninenines.eu">>, + [<<"eu">>, <<"ninenines">>, <<"cowboy">>]}, + {<<"ninenines.eu">>, + [<<"eu">>, <<"ninenines">>]}, + {<<"a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z">>, + [<<"z">>, <<"y">>, <<"x">>, <<"w">>, <<"v">>, <<"u">>, <<"t">>, + <<"s">>, <<"r">>, <<"q">>, <<"p">>, <<"o">>, <<"n">>, <<"m">>, + <<"l">>, <<"k">>, <<"j">>, <<"i">>, <<"h">>, <<"g">>, <<"f">>, + <<"e">>, <<"d">>, <<"c">>, <<"b">>, <<"a">>]} + ], + [{H, fun() -> R = split_host(H) end} || {H, R} <- Tests]. 
+ +split_path_test_() -> + Tests = [ + {<<"/">>, []}, + {<<"/extend//cowboy">>, [<<"extend">>, <<>>, <<"cowboy">>]}, + {<<"/users">>, [<<"users">>]}, + {<<"/users/42/friends">>, [<<"users">>, <<"42">>, <<"friends">>]}, + {<<"/users/a+b/c%21d">>, [<<"users">>, <<"a b">>, <<"c!d">>]} + ], + [{P, fun() -> R = split_path(P) end} || {P, R} <- Tests]. + +match_test_() -> + Dispatch = [ + {[<<"eu">>, <<"ninenines">>, '_', <<"www">>], [], [ + {[<<"users">>, '_', <<"mails">>], [], match_any_subdomain_users, []} + ]}, + {[<<"eu">>, <<"ninenines">>], [], [ + {[<<"users">>, id, <<"friends">>], [], match_extend_users_friends, []}, + {'_', [], match_extend, []} + ]}, + {[var, <<"ninenines">>], [], [ + {[<<"threads">>, var], [], match_duplicate_vars, + [we, {expect, two}, var, here]} + ]}, + {[ext, <<"erlang">>], [], [ + {'_', [], match_erlang_ext, []} + ]}, + {'_', [], [ + {[<<"users">>, id, <<"friends">>], [], match_users_friends, []}, + {'_', [], match_any, []} + ]} + ], + Tests = [ + {<<"any">>, <<"/">>, {ok, match_any, [], []}}, + {<<"www.any.ninenines.eu">>, <<"/users/42/mails">>, + {ok, match_any_subdomain_users, [], []}}, + {<<"www.ninenines.eu">>, <<"/users/42/mails">>, + {ok, match_any, [], []}}, + {<<"www.ninenines.eu">>, <<"/">>, + {ok, match_any, [], []}}, + {<<"www.any.ninenines.eu">>, <<"/not_users/42/mails">>, + {error, notfound, path}}, + {<<"ninenines.eu">>, <<"/">>, + {ok, match_extend, [], []}}, + {<<"ninenines.eu">>, <<"/users/42/friends">>, + {ok, match_extend_users_friends, [], [{id, <<"42">>}]}}, + {<<"erlang.fr">>, '_', + {ok, match_erlang_ext, [], [{ext, <<"fr">>}]}}, + {<<"any">>, <<"/users/444/friends">>, + {ok, match_users_friends, [], [{id, <<"444">>}]}} + ], + [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() -> + {ok, Handler, Opts, Binds, undefined, undefined} + = match(Dispatch, H, P) + end} || {H, P, {ok, Handler, Opts, Binds}} <- Tests]. 
+ +match_info_test_() -> + Dispatch = [ + {[<<"eu">>, <<"ninenines">>, <<"www">>], [], [ + {[<<"pathinfo">>, <<"is">>, <<"next">>, '...'], [], match_path, []} + ]}, + {[<<"eu">>, <<"ninenines">>, '...'], [], [ + {'_', [], match_any, []} + ]}, + % Cyrillic from a latin1 encoded file. + {[<<209,128,209,132>>, <<209,129,208,176,208,185,209,130>>], [], [ + {[<<208,191,209,131,209,130,209,140>>, '...'], [], match_path, []} + ]} + ], + Tests = [ + {<<"ninenines.eu">>, <<"/">>, + {ok, match_any, [], [], [], undefined}}, + {<<"bugs.ninenines.eu">>, <<"/">>, + {ok, match_any, [], [], [<<"bugs">>], undefined}}, + {<<"cowboy.bugs.ninenines.eu">>, <<"/">>, + {ok, match_any, [], [], [<<"cowboy">>, <<"bugs">>], undefined}}, + {<<"www.ninenines.eu">>, <<"/pathinfo/is/next">>, + {ok, match_path, [], [], undefined, []}}, + {<<"www.ninenines.eu">>, <<"/pathinfo/is/next/path_info">>, + {ok, match_path, [], [], undefined, [<<"path_info">>]}}, + {<<"www.ninenines.eu">>, <<"/pathinfo/is/next/foo/bar">>, + {ok, match_path, [], [], undefined, [<<"foo">>, <<"bar">>]}}, + % Cyrillic from a latin1 encoded file. + {<<209,129,208,176,208,185,209,130,46,209,128,209,132>>, + <<47,208,191,209,131,209,130,209,140,47,208,180,208,190,208,188,208,190,208,185>>, + {ok, match_path, [], [], undefined, [<<208,180,208,190,208,188,208,190,208,185>>]}} + ], + [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() -> + R = match(Dispatch, H, P) + end} || {H, P, R} <- Tests]. 
+ +match_constraints_test() -> + Dispatch = [{'_', [], + [{[<<"path">>, value], [{value, int}], match, []}]}], + {ok, _, [], [{value, 123}], _, _} = match(Dispatch, + <<"ninenines.eu">>, <<"/path/123">>), + {ok, _, [], [{value, 123}], _, _} = match(Dispatch, + <<"ninenines.eu">>, <<"/path/123/">>), + {error, notfound, path} = match(Dispatch, + <<"ninenines.eu">>, <<"/path/NaN/">>), + Dispatch2 = [{'_', [], + [{[<<"path">>, username], [{username, function, + fun(Value) -> Value =:= cowboy_bstr:to_lower(Value) end}], + match, []}]}], + {ok, _, [], [{username, <<"essen">>}], _, _} = match(Dispatch2, + <<"ninenines.eu">>, <<"/path/essen">>), + {error, notfound, path} = match(Dispatch2, + <<"ninenines.eu">>, <<"/path/ESSEN">>), + ok. + +match_same_bindings_test() -> + Dispatch = [{[same, same], [], [{'_', [], match, []}]}], + {ok, _, [], [{same, <<"eu">>}], _, _} = match(Dispatch, + <<"eu.eu">>, <<"/">>), + {error, notfound, host} = match(Dispatch, + <<"ninenines.eu">>, <<"/">>), + Dispatch2 = [{[<<"eu">>, <<"ninenines">>, user], [], + [{[<<"path">>, user], [], match, []}]}], + {ok, _, [], [{user, <<"essen">>}], _, _} = match(Dispatch2, + <<"essen.ninenines.eu">>, <<"/path/essen">>), + {ok, _, [], [{user, <<"essen">>}], _, _} = match(Dispatch2, + <<"essen.ninenines.eu">>, <<"/path/essen/">>), + {error, notfound, path} = match(Dispatch2, + <<"essen.ninenines.eu">>, <<"/path/notessen">>), + Dispatch3 = [{'_', [], [{[same, same], [], match, []}]}], + {ok, _, [], [{same, <<"path">>}], _, _} = match(Dispatch3, + <<"ninenines.eu">>, <<"/path/path">>), + {error, notfound, path} = match(Dispatch3, + <<"ninenines.eu">>, <<"/path/to">>), + ok. +-endif. 
diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_spdy.erl b/rabbitmq-server/deps/cowboy/src/cowboy_spdy.erl new file mode 100644 index 0000000..8da9613 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_spdy.erl @@ -0,0 +1,505 @@ +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_spdy). + +%% API. +-export([start_link/4]). + +%% Internal. +-export([init/5]). +-export([system_continue/3]). +-export([system_terminate/4]). +-export([system_code_change/4]). + +%% Internal request process. +-export([request_init/11]). +-export([resume/5]). +-export([reply/4]). +-export([stream_reply/3]). +-export([stream_data/2]). +-export([stream_close/1]). + +%% Internal transport functions. +-export([name/0]). +-export([messages/0]). +-export([recv/3]). +-export([send/2]). +-export([sendfile/2]). +-export([setopts/2]). + +-type streamid() :: non_neg_integer(). +-type socket() :: {pid(), streamid()}. + +-record(child, { + streamid :: streamid(), + pid :: pid(), + input = nofin :: fin | nofin, + in_buffer = <<>> :: binary(), + is_recv = false :: false | {active, socket(), pid()} + | {passive, socket(), pid(), non_neg_integer(), reference()}, + output = nofin :: fin | nofin +}). 
+ +-record(state, { + parent = undefined :: pid(), + socket, + transport, + buffer = <<>> :: binary(), + middlewares, + env, + onrequest, + onresponse, + peer, + zdef, + zinf, + last_streamid = 0 :: streamid(), + children = [] :: [#child{}] +}). + +-type opts() :: [{env, cowboy_middleware:env()} + | {middlewares, [module()]} + | {onrequest, cowboy:onrequest_fun()} + | {onresponse, cowboy:onresponse_fun()}]. +-export_type([opts/0]). + +%% API. + +-spec start_link(any(), inet:socket(), module(), any()) -> {ok, pid()}. +start_link(Ref, Socket, Transport, Opts) -> + proc_lib:start_link(?MODULE, init, + [self(), Ref, Socket, Transport, Opts]). + +%% Internal. + +%% Faster alternative to proplists:get_value/3. +get_value(Key, Opts, Default) -> + case lists:keyfind(Key, 1, Opts) of + {_, Value} -> Value; + _ -> Default + end. + +-spec init(pid(), ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Parent, Ref, Socket, Transport, Opts) -> + process_flag(trap_exit, true), + ok = proc_lib:init_ack(Parent, {ok, self()}), + {ok, Peer} = Transport:peername(Socket), + Middlewares = get_value(middlewares, Opts, [cowboy_router, cowboy_handler]), + Env = [{listener, Ref}|get_value(env, Opts, [])], + OnRequest = get_value(onrequest, Opts, undefined), + OnResponse = get_value(onresponse, Opts, undefined), + Zdef = cow_spdy:deflate_init(), + Zinf = cow_spdy:inflate_init(), + ok = ranch:accept_ack(Ref), + loop(#state{parent=Parent, socket=Socket, transport=Transport, + middlewares=Middlewares, env=Env, onrequest=OnRequest, + onresponse=OnResponse, peer=Peer, zdef=Zdef, zinf=Zinf}). 
+ +loop(State=#state{parent=Parent, socket=Socket, transport=Transport, + buffer=Buffer, children=Children}) -> + {OK, Closed, Error} = Transport:messages(), + Transport:setopts(Socket, [{active, once}]), + receive + {OK, Socket, Data} -> + parse_frame(State, << Buffer/binary, Data/binary >>); + {Closed, Socket} -> + terminate(State); + {Error, Socket, _Reason} -> + terminate(State); + {recv, FromSocket = {Pid, StreamID}, FromPid, Length, Timeout} + when Pid =:= self() -> + Child = #child{in_buffer=InBuffer, is_recv=false} + = get_child(StreamID, State), + if + Length =:= 0, InBuffer =/= <<>> -> + FromPid ! {recv, FromSocket, {ok, InBuffer}}, + loop(replace_child(Child#child{in_buffer= <<>>}, State)); + byte_size(InBuffer) >= Length -> + << Data:Length/binary, Rest/binary >> = InBuffer, + FromPid ! {recv, FromSocket, {ok, Data}}, + loop(replace_child(Child#child{in_buffer=Rest}, State)); + true -> + TRef = erlang:send_after(Timeout, self(), + {recv_timeout, FromSocket}), + loop(replace_child(Child#child{ + is_recv={passive, FromSocket, FromPid, Length, TRef}}, + State)) + end; + {recv_timeout, {Pid, StreamID}} + when Pid =:= self() -> + Child = #child{is_recv={passive, FromSocket, FromPid, _, _}} + = get_child(StreamID, State), + FromPid ! 
{recv, FromSocket, {error, timeout}}, + loop(replace_child(Child, State)); + {reply, {Pid, StreamID}, Status, Headers} + when Pid =:= self() -> + Child = #child{output=nofin} = get_child(StreamID, State), + syn_reply(State, StreamID, true, Status, Headers), + loop(replace_child(Child#child{output=fin}, State)); + {reply, {Pid, StreamID}, Status, Headers, Body} + when Pid =:= self() -> + Child = #child{output=nofin} = get_child(StreamID, State), + syn_reply(State, StreamID, false, Status, Headers), + data(State, StreamID, true, Body), + loop(replace_child(Child#child{output=fin}, State)); + {stream_reply, {Pid, StreamID}, Status, Headers} + when Pid =:= self() -> + #child{output=nofin} = get_child(StreamID, State), + syn_reply(State, StreamID, false, Status, Headers), + loop(State); + {stream_data, {Pid, StreamID}, Data} + when Pid =:= self() -> + #child{output=nofin} = get_child(StreamID, State), + data(State, StreamID, false, Data), + loop(State); + {stream_close, {Pid, StreamID}} + when Pid =:= self() -> + Child = #child{output=nofin} = get_child(StreamID, State), + data(State, StreamID, true, <<>>), + loop(replace_child(Child#child{output=fin}, State)); + {sendfile, {Pid, StreamID}, Filepath} + when Pid =:= self() -> + Child = #child{output=nofin} = get_child(StreamID, State), + data_from_file(State, StreamID, Filepath), + loop(replace_child(Child#child{output=fin}, State)); + {active, FromSocket = {Pid, StreamID}, FromPid} when Pid =:= self() -> + Child = #child{in_buffer=InBuffer, is_recv=false} + = get_child(StreamID, State), + case InBuffer of + <<>> -> + loop(replace_child(Child#child{ + is_recv={active, FromSocket, FromPid}}, State)); + _ -> + FromPid ! {spdy, FromSocket, InBuffer}, + loop(replace_child(Child#child{in_buffer= <<>>}, State)) + end; + {passive, FromSocket = {Pid, StreamID}, FromPid} when Pid =:= self() -> + Child = #child{is_recv=IsRecv} = get_child(StreamID, State), + %% Make sure we aren't in the middle of a recv call. 
+ case IsRecv of false -> ok; {active, FromSocket, FromPid} -> ok end, + loop(replace_child(Child#child{is_recv=false}, State)); + {'EXIT', Parent, Reason} -> + exit(Reason); + {'EXIT', Pid, _} -> + %% @todo Report the error if any. + loop(delete_child(Pid, State)); + {system, From, Request} -> + sys:handle_system_msg(Request, From, Parent, ?MODULE, [], State); + %% Calls from the supervisor module. + {'$gen_call', {To, Tag}, which_children} -> + Workers = [{?MODULE, Pid, worker, [?MODULE]} + || #child{pid=Pid} <- Children], + To ! {Tag, Workers}, + loop(State); + {'$gen_call', {To, Tag}, count_children} -> + NbChildren = length(Children), + Counts = [{specs, 1}, {active, NbChildren}, + {supervisors, 0}, {workers, NbChildren}], + To ! {Tag, Counts}, + loop(State); + {'$gen_call', {To, Tag}, _} -> + To ! {Tag, {error, ?MODULE}}, + loop(State) + after 60000 -> + goaway(State, ok), + terminate(State) + end. + +-spec system_continue(_, _, #state{}) -> ok. +system_continue(_, _, State) -> + loop(State). + +-spec system_terminate(any(), _, _, _) -> no_return(). +system_terminate(Reason, _, _, _) -> + exit(Reason). + +-spec system_code_change(Misc, _, _, _) -> {ok, Misc} when Misc::#state{}. +system_code_change(Misc, _, _, _) -> + {ok, Misc}. + +parse_frame(State=#state{zinf=Zinf}, Data) -> + case cow_spdy:split(Data) of + {true, Frame, Rest} -> + P = cow_spdy:parse(Frame, Zinf), + case handle_frame(State#state{buffer = Rest}, P) of + error -> + terminate(State); + State2 -> + parse_frame(State2, Rest) + end; + false -> + loop(State#state{buffer=Data}) + end. + +%% FLAG_UNIDIRECTIONAL can only be set by the server. +handle_frame(State, {syn_stream, StreamID, _, _, true, + _, _, _, _, _, _, _}) -> + rst_stream(State, StreamID, protocol_error), + State; +%% We do not support Associated-To-Stream-ID. 
+handle_frame(State, {syn_stream, StreamID, AssocToStreamID, + _, _, _, _, _, _, _, _, _}) when AssocToStreamID =/= 0 -> + rst_stream(State, StreamID, internal_error), + State; +%% SYN_STREAM. +%% +%% Erlang does not allow us to control the priority of processes +%% so we ignore that value entirely. +handle_frame(State=#state{middlewares=Middlewares, env=Env, + onrequest=OnRequest, onresponse=OnResponse, peer=Peer}, + {syn_stream, StreamID, _, IsFin, _, _, + Method, _, Host, Path, Version, Headers}) -> + Pid = spawn_link(?MODULE, request_init, [ + {self(), StreamID}, Peer, OnRequest, OnResponse, + Env, Middlewares, Method, Host, Path, Version, Headers + ]), + new_child(State, StreamID, Pid, IsFin); +%% RST_STREAM. +handle_frame(State, {rst_stream, StreamID, Status}) -> + error_logger:error_msg("Received RST_STREAM frame ~p ~p", + [StreamID, Status]), + %% @todo Stop StreamID. + State; +%% PING initiated by the server; ignore, we don't send any. +handle_frame(State, {ping, PingID}) when PingID rem 2 =:= 0 -> + error_logger:error_msg("Ignored PING control frame: ~p~n", [PingID]), + State; +%% PING initiated by the client; send it back. +handle_frame(State=#state{socket=Socket, transport=Transport}, + {ping, PingID}) -> + Transport:send(Socket, cow_spdy:ping(PingID)), + State; +%% Data received for a stream. +handle_frame(State, {data, StreamID, IsFin, Data}) -> + Child = #child{input=nofin, in_buffer=Buffer, is_recv=IsRecv} + = get_child(StreamID, State), + Data2 = << Buffer/binary, Data/binary >>, + IsFin2 = if IsFin -> fin; true -> nofin end, + Child2 = case IsRecv of + {active, FromSocket, FromPid} -> + FromPid ! {spdy, FromSocket, Data}, + Child#child{input=IsFin2, is_recv=false}; + {passive, FromSocket, FromPid, 0, TRef} -> + FromPid ! 
{recv, FromSocket, {ok, Data2}}, + cancel_recv_timeout(StreamID, TRef), + Child#child{input=IsFin2, in_buffer= <<>>, is_recv=false}; + {passive, FromSocket, FromPid, Length, TRef} + when byte_size(Data2) >= Length -> + << Data3:Length/binary, Rest/binary >> = Data2, + FromPid ! {recv, FromSocket, {ok, Data3}}, + cancel_recv_timeout(StreamID, TRef), + Child#child{input=IsFin2, in_buffer=Rest, is_recv=false}; + _ -> + Child#child{input=IsFin2, in_buffer=Data2} + end, + replace_child(Child2, State); +%% General error, can't recover. +handle_frame(State, {error, badprotocol}) -> + goaway(State, protocol_error), + error; +%% Ignore all other frames for now. +handle_frame(State, Frame) -> + error_logger:error_msg("Ignored frame ~p", [Frame]), + State. + +cancel_recv_timeout(StreamID, TRef) -> + _ = erlang:cancel_timer(TRef), + receive + {recv_timeout, {Pid, StreamID}} + when Pid =:= self() -> + ok + after 0 -> + ok + end. + +%% @todo We must wait for the children to finish here, +%% but only up to N milliseconds. Then we shutdown. +terminate(_State) -> + ok. + +syn_reply(#state{socket=Socket, transport=Transport, zdef=Zdef}, + StreamID, IsFin, Status, Headers) -> + Transport:send(Socket, cow_spdy:syn_reply(Zdef, StreamID, IsFin, + Status, <<"HTTP/1.1">>, Headers)). + +rst_stream(#state{socket=Socket, transport=Transport}, StreamID, Status) -> + Transport:send(Socket, cow_spdy:rst_stream(StreamID, Status)). + +goaway(#state{socket=Socket, transport=Transport, last_streamid=LastStreamID}, + Status) -> + Transport:send(Socket, cow_spdy:goaway(LastStreamID, Status)). + +data(#state{socket=Socket, transport=Transport}, StreamID, IsFin, Data) -> + Transport:send(Socket, cow_spdy:data(StreamID, IsFin, Data)). + +data_from_file(#state{socket=Socket, transport=Transport}, + StreamID, Filepath) -> + {ok, IoDevice} = file:open(Filepath, [read, binary, raw]), + data_from_file(Socket, Transport, StreamID, IoDevice). 
+ +data_from_file(Socket, Transport, StreamID, IoDevice) -> + case file:read(IoDevice, 16#1fff) of + eof -> + _ = Transport:send(Socket, cow_spdy:data(StreamID, true, <<>>)), + ok; + {ok, Data} -> + case Transport:send(Socket, cow_spdy:data(StreamID, false, Data)) of + ok -> + data_from_file(Socket, Transport, StreamID, IoDevice); + {error, _} -> + ok + end + end. + +%% Children. + +new_child(State=#state{children=Children}, StreamID, Pid, IsFin) -> + IsFin2 = if IsFin -> fin; true -> nofin end, + State#state{last_streamid=StreamID, + children=[#child{streamid=StreamID, + pid=Pid, input=IsFin2}|Children]}. + +get_child(StreamID, #state{children=Children}) -> + lists:keyfind(StreamID, #child.streamid, Children). + +replace_child(Child=#child{streamid=StreamID}, + State=#state{children=Children}) -> + Children2 = lists:keyreplace(StreamID, #child.streamid, Children, Child), + State#state{children=Children2}. + +delete_child(Pid, State=#state{children=Children}) -> + Children2 = lists:keydelete(Pid, #child.pid, Children), + State#state{children=Children2}. + +%% Request process. + +-spec request_init(socket(), {inet:ip_address(), inet:port_number()}, + cowboy:onrequest_fun(), cowboy:onresponse_fun(), + cowboy_middleware:env(), [module()], + binary(), binary(), binary(), binary(), [{binary(), binary()}]) + -> ok. +request_init(FakeSocket, Peer, OnRequest, OnResponse, + Env, Middlewares, Method, Host, Path, Version, Headers) -> + {Host2, Port} = cow_http:parse_fullhost(Host), + {Path2, Qs} = cow_http:parse_fullpath(Path), + Version2 = cow_http:parse_version(Version), + Req = cowboy_req:new(FakeSocket, ?MODULE, Peer, + Method, Path2, Qs, Version2, Headers, + Host2, Port, <<>>, true, false, OnResponse), + case OnRequest of + undefined -> + execute(Req, Env, Middlewares); + _ -> + Req2 = OnRequest(Req), + case cowboy_req:get(resp_state, Req2) of + waiting -> execute(Req2, Env, Middlewares); + _ -> ok + end + end. 
+ +-spec execute(cowboy_req:req(), cowboy_middleware:env(), [module()]) + -> ok. +execute(Req, _, []) -> + cowboy_req:ensure_response(Req, 204); +execute(Req, Env, [Middleware|Tail]) -> + case Middleware:execute(Req, Env) of + {ok, Req2, Env2} -> + execute(Req2, Env2, Tail); + {suspend, Module, Function, Args} -> + erlang:hibernate(?MODULE, resume, + [Env, Tail, Module, Function, Args]); + {halt, Req2} -> + cowboy_req:ensure_response(Req2, 204); + {error, Status, Req2} -> + cowboy_req:reply(Status, Req2) + end. + +-spec resume(cowboy_middleware:env(), [module()], + module(), module(), [any()]) -> ok. +resume(Env, Tail, Module, Function, Args) -> + case apply(Module, Function, Args) of + {ok, Req2, Env2} -> + execute(Req2, Env2, Tail); + {suspend, Module2, Function2, Args2} -> + erlang:hibernate(?MODULE, resume, + [Env, Tail, Module2, Function2, Args2]); + {halt, Req2} -> + cowboy_req:ensure_response(Req2, 204); + {error, Status, Req2} -> + cowboy_req:reply(Status, Req2) + end. + +%% Reply functions used by cowboy_req. + +-spec reply(socket(), binary(), cowboy:http_headers(), iodata()) -> ok. +reply(Socket = {Pid, _}, Status, Headers, Body) -> + _ = case iolist_size(Body) of + 0 -> Pid ! {reply, Socket, Status, Headers}; + _ -> Pid ! {reply, Socket, Status, Headers, Body} + end, + ok. + +-spec stream_reply(socket(), binary(), cowboy:http_headers()) -> ok. +stream_reply(Socket = {Pid, _}, Status, Headers) -> + _ = Pid ! {stream_reply, Socket, Status, Headers}, + ok. + +-spec stream_data(socket(), iodata()) -> ok. +stream_data(Socket = {Pid, _}, Data) -> + _ = Pid ! {stream_data, Socket, Data}, + ok. + +-spec stream_close(socket()) -> ok. +stream_close(Socket = {Pid, _}) -> + _ = Pid ! {stream_close, Socket}, + ok. + +%% Internal transport functions. + +-spec name() -> spdy. +name() -> + spdy. + +-spec messages() -> {spdy, spdy_closed, spdy_error}. +messages() -> + {spdy, spdy_closed, spdy_error}. 
+ +-spec recv(socket(), non_neg_integer(), timeout()) + -> {ok, binary()} | {error, timeout}. +recv(Socket = {Pid, _}, Length, Timeout) -> + _ = Pid ! {recv, Socket, self(), Length, Timeout}, + receive + {recv, Socket, Ret} -> + Ret + end. + +-spec send(socket(), iodata()) -> ok. +send(Socket, Data) -> + stream_data(Socket, Data). + +%% We don't wait for the result of the actual sendfile call, +%% therefore we can't know how much was actually sent. +%% This isn't a problem as we don't use this value in Cowboy. +-spec sendfile(socket(), file:name_all()) -> {ok, undefined}. +sendfile(Socket = {Pid, _}, Filepath) -> + _ = Pid ! {sendfile, Socket, Filepath}, + {ok, undefined}. + +-spec setopts({pid(), _}, list()) -> ok. +setopts(Socket = {Pid, _}, [{active, once}]) -> + _ = Pid ! {active, Socket, self()}, + ok; +setopts(Socket = {Pid, _}, [{active, false}]) -> + _ = Pid ! {passive, Socket, self()}, + ok. diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_static.erl b/rabbitmq-server/deps/cowboy/src/cowboy_static.erl new file mode 100644 index 0000000..fae4568 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_static.erl @@ -0,0 +1,291 @@ +%% Copyright (c) 2011, Magnus Klaar +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +-module(cowboy_static). + +-export([init/3]). +-export([rest_init/2]). +-export([malformed_request/2]). +-export([forbidden/2]). +-export([content_types_provided/2]). +-export([resource_exists/2]). +-export([last_modified/2]). +-export([generate_etag/2]). +-export([get_file/2]). + +-type extra_etag() :: {etag, module(), function()} | {etag, false}. +-type extra_mimetypes() :: {mimetypes, module(), function()} + | {mimetypes, binary() | {binary(), binary(), [{binary(), binary()}]}}. +-type extra() :: [extra_etag() | extra_mimetypes()]. +-type opts() :: {file | dir, string() | binary()} + | {file | dir, string() | binary(), extra()} + | {priv_file | priv_dir, atom(), string() | binary()} + | {priv_file | priv_dir, atom(), string() | binary(), extra()}. +-export_type([opts/0]). + +-include_lib("kernel/include/file.hrl"). + +-type state() :: {binary(), {ok, #file_info{}} | {error, atom()}, extra()}. + +-spec init(_, _, _) -> {upgrade, protocol, cowboy_rest}. +init(_, _, _) -> + {upgrade, protocol, cowboy_rest}. + +%% Resolve the file that will be sent and get its file information. +%% If the handler is configured to manage a directory, check that the +%% requested file is inside the configured directory. + +-spec rest_init(Req, opts()) + -> {ok, Req, error | state()} + when Req::cowboy_req:req(). +rest_init(Req, {Name, Path}) -> + rest_init_opts(Req, {Name, Path, []}); +rest_init(Req, {Name, App, Path}) + when Name =:= priv_file; Name =:= priv_dir -> + rest_init_opts(Req, {Name, App, Path, []}); +rest_init(Req, Opts) -> + rest_init_opts(Req, Opts). + +rest_init_opts(Req, {priv_file, App, Path, Extra}) -> + rest_init_info(Req, absname(priv_path(App, Path)), Extra); +rest_init_opts(Req, {file, Path, Extra}) -> + rest_init_info(Req, absname(Path), Extra); +rest_init_opts(Req, {priv_dir, App, Path, Extra}) -> + rest_init_dir(Req, priv_path(App, Path), Extra); +rest_init_opts(Req, {dir, Path, Extra}) -> + rest_init_dir(Req, Path, Extra). 
+ +priv_path(App, Path) -> + case code:priv_dir(App) of + {error, bad_name} -> + error({badarg, "Can't resolve the priv_dir of application " + ++ atom_to_list(App)}); + PrivDir when is_list(Path) -> + PrivDir ++ "/" ++ Path; + PrivDir when is_binary(Path) -> + << (list_to_binary(PrivDir))/binary, $/, Path/binary >> + end. + +absname(Path) when is_list(Path) -> + filename:absname(list_to_binary(Path)); +absname(Path) when is_binary(Path) -> + filename:absname(Path). + +rest_init_dir(Req, Path, Extra) when is_list(Path) -> + rest_init_dir(Req, list_to_binary(Path), Extra); +rest_init_dir(Req, Path, Extra) -> + Dir = fullpath(filename:absname(Path)), + {PathInfo, Req2} = cowboy_req:path_info(Req), + Filepath = filename:join([Dir|PathInfo]), + Len = byte_size(Dir), + case fullpath(Filepath) of + << Dir:Len/binary, $/, _/binary >> -> + rest_init_info(Req2, Filepath, Extra); + _ -> + {ok, Req2, error} + end. + +fullpath(Path) -> + fullpath(filename:split(Path), []). +fullpath([], Acc) -> + filename:join(lists:reverse(Acc)); +fullpath([<<".">>|Tail], Acc) -> + fullpath(Tail, Acc); +fullpath([<<"..">>|Tail], Acc=[_]) -> + fullpath(Tail, Acc); +fullpath([<<"..">>|Tail], [_|Acc]) -> + fullpath(Tail, Acc); +fullpath([Segment|Tail], Acc) -> + fullpath(Tail, [Segment|Acc]). + +rest_init_info(Req, Path, Extra) -> + Info = file:read_file_info(Path, [{time, universal}]), + {ok, Req, {Path, Info, Extra}}. + +-ifdef(TEST). +fullpath_test_() -> + Tests = [ + {<<"/home/cowboy">>, <<"/home/cowboy">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/./">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/./././././.">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/abc/..">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/abc/../">>}, + {<<"/home/cowboy">>, <<"/home/cowboy/abc/./../.">>}, + {<<"/">>, <<"/home/cowboy/../../../../../..">>}, + {<<"/etc/passwd">>, <<"/home/cowboy/../../etc/passwd">>} + ], + [{P, fun() -> R = fullpath(P) end} || {R, P} <- Tests]. 
+ +good_path_check_test_() -> + Tests = [ + <<"/home/cowboy/file">>, + <<"/home/cowboy/file/">>, + <<"/home/cowboy/./file">>, + <<"/home/cowboy/././././././file">>, + <<"/home/cowboy/abc/../file">>, + <<"/home/cowboy/abc/../file">>, + <<"/home/cowboy/abc/./.././file">> + ], + [{P, fun() -> + case fullpath(P) of + << "/home/cowboy/", _/binary >> -> ok + end + end} || P <- Tests]. + +bad_path_check_test_() -> + Tests = [ + <<"/home/cowboy/../../../../../../file">>, + <<"/home/cowboy/../../etc/passwd">> + ], + [{P, fun() -> + error = case fullpath(P) of + << "/home/cowboy/", _/binary >> -> ok; + _ -> error + end + end} || P <- Tests]. + +good_path_win32_check_test_() -> + Tests = case os:type() of + {unix, _} -> + []; + {win32, _} -> + [ + <<"c:/home/cowboy/file">>, + <<"c:/home/cowboy/file/">>, + <<"c:/home/cowboy/./file">>, + <<"c:/home/cowboy/././././././file">>, + <<"c:/home/cowboy/abc/../file">>, + <<"c:/home/cowboy/abc/../file">>, + <<"c:/home/cowboy/abc/./.././file">> + ] + end, + [{P, fun() -> + case fullpath(P) of + << "c:/home/cowboy/", _/binary >> -> ok + end + end} || P <- Tests]. + +bad_path_win32_check_test_() -> + Tests = case os:type() of + {unix, _} -> + []; + {win32, _} -> + [ + <<"c:/home/cowboy/../../secretfile.bat">>, + <<"c:/home/cowboy/c:/secretfile.bat">>, + <<"c:/home/cowboy/..\\..\\secretfile.bat">>, + <<"c:/home/cowboy/c:\\secretfile.bat">> + ] + end, + [{P, fun() -> + error = case fullpath(P) of + << "c:/home/cowboy/", _/binary >> -> ok; + _ -> error + end + end} || P <- Tests]. +-endif. + +%% Reject requests that tried to access a file outside +%% the target directory. + +-spec malformed_request(Req, State) + -> {boolean(), Req, State}. +malformed_request(Req, State) -> + {State =:= error, Req, State}. + +%% Directories, files that can't be accessed at all and +%% files with no read flag are forbidden. + +-spec forbidden(Req, State) + -> {boolean(), Req, State} + when State::state(). 
+forbidden(Req, State={_, {ok, #file_info{type=directory}}, _}) -> + {true, Req, State}; +forbidden(Req, State={_, {error, eacces}, _}) -> + {true, Req, State}; +forbidden(Req, State={_, {ok, #file_info{access=Access}}, _}) + when Access =:= write; Access =:= none -> + {true, Req, State}; +forbidden(Req, State) -> + {false, Req, State}. + +%% Detect the mimetype of the file. + +-spec content_types_provided(Req, State) + -> {[{binary(), get_file}], Req, State} + when State::state(). +content_types_provided(Req, State={Path, _, Extra}) -> + case lists:keyfind(mimetypes, 1, Extra) of + false -> + {[{cow_mimetypes:web(Path), get_file}], Req, State}; + {mimetypes, Module, Function} -> + {[{Module:Function(Path), get_file}], Req, State}; + {mimetypes, Type} -> + {[{Type, get_file}], Req, State} + end. + +%% Assume the resource doesn't exist if it's not a regular file. + +-spec resource_exists(Req, State) + -> {boolean(), Req, State} + when State::state(). +resource_exists(Req, State={_, {ok, #file_info{type=regular}}, _}) -> + {true, Req, State}; +resource_exists(Req, State) -> + {false, Req, State}. + +%% Generate an etag for the file. + +-spec generate_etag(Req, State) + -> {{strong | weak, binary()}, Req, State} + when State::state(). +generate_etag(Req, State={Path, {ok, #file_info{size=Size, mtime=Mtime}}, + Extra}) -> + case lists:keyfind(etag, 1, Extra) of + false -> + {generate_default_etag(Size, Mtime), Req, State}; + {etag, Module, Function} -> + {Module:Function(Path, Size, Mtime), Req, State}; + {etag, false} -> + {undefined, Req, State} + end. + +generate_default_etag(Size, Mtime) -> + {strong, integer_to_binary(erlang:phash2({Size, Mtime}, 16#ffffffff))}. + +%% Return the time of last modification of the file. + +-spec last_modified(Req, State) + -> {calendar:datetime(), Req, State} + when State::state(). +last_modified(Req, State={_, {ok, #file_info{mtime=Modified}}, _}) -> + {Modified, Req, State}. + +%% Stream the file. 
+%% @todo Export cowboy_req:resp_body_fun()? + +-spec get_file(Req, State) + -> {{stream, non_neg_integer(), fun()}, Req, State} + when State::state(). +get_file(Req, State={Path, {ok, #file_info{size=Size}}, _}) -> + Sendfile = fun (Socket, Transport) -> + case Transport:sendfile(Socket, Path) of + {ok, _} -> ok; + {error, closed} -> ok; + {error, etimedout} -> ok + end + end, + {{stream, Size, Sendfile}, Req, State}. diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_sub_protocol.erl b/rabbitmq-server/deps/cowboy/src/cowboy_sub_protocol.erl new file mode 100644 index 0000000..713c3cd --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_sub_protocol.erl @@ -0,0 +1,23 @@ +%% Copyright (c) 2013, James Fish +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_sub_protocol). + +-callback upgrade(Req, Env, module(), any()) + -> {ok, Req, Env} + | {suspend, module(), atom(), [any()]} + | {halt, Req} + | {error, cowboy:http_status(), Req} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_sup.erl b/rabbitmq-server/deps/cowboy/src/cowboy_sup.erl similarity index 75% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_sup.erl rename to rabbitmq-server/deps/cowboy/src/cowboy_sup.erl index 34591bc..cf48595 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_sup.erl +++ b/rabbitmq-server/deps/cowboy/src/cowboy_sup.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011, Loïc Hoguin +%% Copyright (c) 2011-2014, Loïc Hoguin %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -12,24 +12,18 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -%% @private -module(cowboy_sup). -behaviour(supervisor). --export([start_link/0]). %% API. --export([init/1]). %% supervisor. - --define(SUPERVISOR, ?MODULE). - -%% API. +-export([start_link/0]). +-export([init/1]). -spec start_link() -> {ok, pid()}. start_link() -> - supervisor:start_link({local, ?SUPERVISOR}, ?MODULE, []). - -%% supervisor. + supervisor:start_link({local, ?MODULE}, ?MODULE, []). --spec init([]) -> {ok, {{one_for_one, 10, 10}, [{_, _, _, _, _, _}, ...]}}. +-spec init([]) + -> {ok, {{supervisor:strategy(), 10, 10}, [supervisor:child_spec()]}}. 
init([]) -> Procs = [{cowboy_clock, {cowboy_clock, start_link, []}, permanent, 5000, worker, [cowboy_clock]}], diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_websocket.erl b/rabbitmq-server/deps/cowboy/src/cowboy_websocket.erl new file mode 100644 index 0000000..c0f94c4 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_websocket.erl @@ -0,0 +1,770 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% Cowboy supports versions 7 through 17 of the Websocket drafts. +%% It also supports RFC6455, the proposed standard for Websocket. +-module(cowboy_websocket). +-behaviour(cowboy_sub_protocol). + +-export([upgrade/4]). +-export([handler_loop/4]). + +-type close_code() :: 1000..4999. +-export_type([close_code/0]). + +-type frame() :: close | ping | pong + | {text | binary | close | ping | pong, iodata()} + | {close, close_code(), iodata()}. +-export_type([frame/0]). + +-type opcode() :: 0 | 1 | 2 | 8 | 9 | 10. +-type mask_key() :: 0..16#ffffffff. +-type frag_state() :: undefined + | {nofin, opcode(), binary()} | {fin, opcode(), binary()}. +-type rsv() :: << _:3 >>. +-type terminate_reason() :: {normal | error | remote, atom()} + | {remote, close_code(), binary()}. 
+ +-record(state, { + env :: cowboy_middleware:env(), + socket = undefined :: inet:socket(), + transport = undefined :: module(), + handler :: module(), + key = undefined :: undefined | binary(), + timeout = infinity :: timeout(), + timeout_ref = undefined :: undefined | reference(), + messages = undefined :: undefined | {atom(), atom(), atom()}, + hibernate = false :: boolean(), + frag_state = undefined :: frag_state(), + utf8_state = <<>> :: binary(), + deflate_frame = false :: boolean(), + inflate_state :: undefined | port(), + deflate_state :: undefined | port() +}). + +-spec upgrade(Req, Env, module(), any()) + -> {ok, Req, Env} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(), Env::cowboy_middleware:env(). +upgrade(Req, Env, Handler, HandlerOpts) -> + {_, Ref} = lists:keyfind(listener, 1, Env), + ranch:remove_connection(Ref), + [Socket, Transport] = cowboy_req:get([socket, transport], Req), + State = #state{env=Env, socket=Socket, transport=Transport, + handler=Handler}, + try websocket_upgrade(State, Req) of + {ok, State2, Req2} -> + handler_init(State2, Req2, HandlerOpts) + catch _:_ -> + receive + {cowboy_req, resp_sent} -> ok + after 0 -> + _ = cowboy_req:reply(400, Req), + exit(normal) + end + end. + +-spec websocket_upgrade(#state{}, Req) + -> {ok, #state{}, Req} when Req::cowboy_req:req(). +websocket_upgrade(State, Req) -> + {ok, ConnTokens, Req2} + = cowboy_req:parse_header(<<"connection">>, Req), + true = lists:member(<<"upgrade">>, ConnTokens), + %% @todo Should probably send a 426 if the Upgrade header is missing. 
+ {ok, [<<"websocket">>], Req3} + = cowboy_req:parse_header(<<"upgrade">>, Req2), + {Version, Req4} = cowboy_req:header(<<"sec-websocket-version">>, Req3), + IntVersion = list_to_integer(binary_to_list(Version)), + true = (IntVersion =:= 7) orelse (IntVersion =:= 8) + orelse (IntVersion =:= 13), + {Key, Req5} = cowboy_req:header(<<"sec-websocket-key">>, Req4), + false = Key =:= undefined, + websocket_extensions(State#state{key=Key}, + cowboy_req:set_meta(websocket_version, IntVersion, Req5)). + +-spec websocket_extensions(#state{}, Req) + -> {ok, #state{}, Req} when Req::cowboy_req:req(). +websocket_extensions(State, Req) -> + case cowboy_req:parse_header(<<"sec-websocket-extensions">>, Req) of + {ok, Extensions, Req2} when Extensions =/= undefined -> + [Compress] = cowboy_req:get([resp_compress], Req), + case lists:keyfind(<<"x-webkit-deflate-frame">>, 1, Extensions) of + {<<"x-webkit-deflate-frame">>, []} when Compress =:= true -> + Inflate = zlib:open(), + Deflate = zlib:open(), + % Since we are negotiating an unconstrained deflate-frame + % then we must be willing to accept frames using the + % maximum window size which is 2^15. The negative value + % indicates that zlib headers are not used. + ok = zlib:inflateInit(Inflate, -15), + % Initialize the deflater with a window size of 2^15 bits and disable + % the zlib headers. + ok = zlib:deflateInit(Deflate, best_compression, deflated, -15, 8, default), + {ok, State#state{ + deflate_frame = true, + inflate_state = Inflate, + deflate_state = Deflate + }, cowboy_req:set_meta(websocket_compress, true, Req2)}; + _ -> + {ok, State, cowboy_req:set_meta(websocket_compress, false, Req2)} + end; + _ -> + {ok, State, cowboy_req:set_meta(websocket_compress, false, Req)} + end. + +-spec handler_init(#state{}, Req, any()) + -> {ok, Req, cowboy_middleware:env()} | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). 
+handler_init(State=#state{env=Env, transport=Transport, + handler=Handler}, Req, HandlerOpts) -> + try Handler:websocket_init(Transport:name(), Req, HandlerOpts) of + {ok, Req2, HandlerState} -> + websocket_handshake(State, Req2, HandlerState); + {ok, Req2, HandlerState, hibernate} -> + websocket_handshake(State#state{hibernate=true}, + Req2, HandlerState); + {ok, Req2, HandlerState, Timeout} -> + websocket_handshake(State#state{timeout=Timeout}, + Req2, HandlerState); + {ok, Req2, HandlerState, Timeout, hibernate} -> + websocket_handshake(State#state{timeout=Timeout, + hibernate=true}, Req2, HandlerState); + {shutdown, Req2} -> + cowboy_req:ensure_response(Req2, 400), + {ok, Req2, [{result, closed}|Env]} + catch Class:Reason -> + Stacktrace = erlang:get_stacktrace(), + cowboy_req:maybe_reply(Stacktrace, Req), + erlang:Class([ + {reason, Reason}, + {mfa, {Handler, websocket_init, 3}}, + {stacktrace, Stacktrace}, + {req, cowboy_req:to_list(Req)}, + {opts, HandlerOpts} + ]) + end. + +-spec websocket_handshake(#state{}, Req, any()) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +websocket_handshake(State=#state{ + transport=Transport, key=Key, deflate_frame=DeflateFrame}, + Req, HandlerState) -> + Challenge = base64:encode(crypto:hash(sha, + << Key/binary, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" >>)), + Extensions = case DeflateFrame of + false -> []; + true -> [{<<"sec-websocket-extensions">>, <<"x-webkit-deflate-frame">>}] + end, + {ok, Req2} = cowboy_req:upgrade_reply( + 101, + [{<<"upgrade">>, <<"websocket">>}, + {<<"sec-websocket-accept">>, Challenge}| + Extensions], + Req), + %% Flush the resp_sent message before moving on. + receive {cowboy_req, resp_sent} -> ok after 0 -> ok end, + State2 = handler_loop_timeout(State), + handler_before_loop(State2#state{key=undefined, + messages=Transport:messages()}, Req2, HandlerState, <<>>). 
+ +-spec handler_before_loop(#state{}, Req, any(), binary()) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +handler_before_loop(State=#state{ + socket=Socket, transport=Transport, hibernate=true}, + Req, HandlerState, SoFar) -> + Transport:setopts(Socket, [{active, once}]), + {suspend, ?MODULE, handler_loop, + [State#state{hibernate=false}, Req, HandlerState, SoFar]}; +handler_before_loop(State=#state{socket=Socket, transport=Transport}, + Req, HandlerState, SoFar) -> + Transport:setopts(Socket, [{active, once}]), + handler_loop(State, Req, HandlerState, SoFar). + +-spec handler_loop_timeout(#state{}) -> #state{}. +handler_loop_timeout(State=#state{timeout=infinity}) -> + State#state{timeout_ref=undefined}; +handler_loop_timeout(State=#state{timeout=Timeout, timeout_ref=PrevRef}) -> + _ = case PrevRef of undefined -> ignore; PrevRef -> + erlang:cancel_timer(PrevRef) end, + TRef = erlang:start_timer(Timeout, self(), ?MODULE), + State#state{timeout_ref=TRef}. + +-spec handler_loop(#state{}, Req, any(), binary()) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +handler_loop(State=#state{socket=Socket, messages={OK, Closed, Error}, + timeout_ref=TRef}, Req, HandlerState, SoFar) -> + receive + {OK, Socket, Data} -> + State2 = handler_loop_timeout(State), + websocket_data(State2, Req, HandlerState, + << SoFar/binary, Data/binary >>); + {Closed, Socket} -> + handler_terminate(State, Req, HandlerState, {error, closed}); + {Error, Socket, Reason} -> + handler_terminate(State, Req, HandlerState, {error, Reason}); + {timeout, TRef, ?MODULE} -> + websocket_close(State, Req, HandlerState, {normal, timeout}); + {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) -> + handler_loop(State, Req, HandlerState, SoFar); + Message -> + handler_call(State, Req, HandlerState, + SoFar, websocket_info, Message, fun handler_before_loop/4) + end. 
+ +%% All frames passing through this function are considered valid, +%% with the only exception of text and close frames with a payload +%% which may still contain errors. +-spec websocket_data(#state{}, Req, any(), binary()) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +%% RSV bits MUST be 0 unless an extension is negotiated +%% that defines meanings for non-zero values. +websocket_data(State, Req, HandlerState, << _:1, Rsv:3, _/bits >>) + when Rsv =/= 0, State#state.deflate_frame =:= false -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% Invalid opcode. Note that these opcodes may be used by extensions. +websocket_data(State, Req, HandlerState, << _:4, Opcode:4, _/bits >>) + when Opcode > 2, Opcode =/= 8, Opcode =/= 9, Opcode =/= 10 -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% Control frames MUST NOT be fragmented. +websocket_data(State, Req, HandlerState, << 0:1, _:3, Opcode:4, _/bits >>) + when Opcode >= 8 -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% A frame MUST NOT use the zero opcode unless fragmentation was initiated. +websocket_data(State=#state{frag_state=undefined}, Req, HandlerState, + << _:4, 0:4, _/bits >>) -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% Non-control opcode when expecting control message or next fragment. +websocket_data(State=#state{frag_state={nofin, _, _}}, Req, HandlerState, + << _:4, Opcode:4, _/bits >>) + when Opcode =/= 0, Opcode < 8 -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% Close control frame length MUST be 0 or >= 2. +websocket_data(State, Req, HandlerState, << _:4, 8:4, _:1, 1:7, _/bits >>) -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% Close control frame with incomplete close code. Need more data. 
+websocket_data(State, Req, HandlerState, + Data = << _:4, 8:4, 1:1, Len:7, _/bits >>) + when Len > 1, byte_size(Data) < 8 -> + handler_before_loop(State, Req, HandlerState, Data); +%% 7 bits payload length. +websocket_data(State, Req, HandlerState, << Fin:1, Rsv:3/bits, Opcode:4, 1:1, + Len:7, MaskKey:32, Rest/bits >>) + when Len < 126 -> + websocket_data(State, Req, HandlerState, + Opcode, Len, MaskKey, Rest, Rsv, Fin); +%% 16 bits payload length. +websocket_data(State, Req, HandlerState, << Fin:1, Rsv:3/bits, Opcode:4, 1:1, + 126:7, Len:16, MaskKey:32, Rest/bits >>) + when Len > 125, Opcode < 8 -> + websocket_data(State, Req, HandlerState, + Opcode, Len, MaskKey, Rest, Rsv, Fin); +%% 63 bits payload length. +websocket_data(State, Req, HandlerState, << Fin:1, Rsv:3/bits, Opcode:4, 1:1, + 127:7, 0:1, Len:63, MaskKey:32, Rest/bits >>) + when Len > 16#ffff, Opcode < 8 -> + websocket_data(State, Req, HandlerState, + Opcode, Len, MaskKey, Rest, Rsv, Fin); +%% When payload length is over 63 bits, the most significant bit MUST be 0. +websocket_data(State, Req, HandlerState, << _:8, 1:1, 127:7, 1:1, _:7, _/binary >>) -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% All frames sent from the client to the server are masked. +websocket_data(State, Req, HandlerState, << _:8, 0:1, _/bits >>) -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% For the next two clauses, it can be one of the following: +%% +%% * The minimal number of bytes MUST be used to encode the length +%% * All control frames MUST have a payload length of 125 bytes or less +websocket_data(State, Req, HandlerState, << _:9, 126:7, _:48, _/bits >>) -> + websocket_close(State, Req, HandlerState, {error, badframe}); +websocket_data(State, Req, HandlerState, << _:9, 127:7, _:96, _/bits >>) -> + websocket_close(State, Req, HandlerState, {error, badframe}); +%% Need more data. 
+websocket_data(State, Req, HandlerState, Data) -> + handler_before_loop(State, Req, HandlerState, Data). + +%% Initialize or update fragmentation state. +-spec websocket_data(#state{}, Req, any(), + opcode(), non_neg_integer(), mask_key(), binary(), rsv(), 0 | 1) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +%% The opcode is only included in the first frame fragment. +websocket_data(State=#state{frag_state=undefined}, Req, HandlerState, + Opcode, Len, MaskKey, Data, Rsv, 0) -> + websocket_payload(State#state{frag_state={nofin, Opcode, <<>>}}, + Req, HandlerState, 0, Len, MaskKey, <<>>, 0, Data, Rsv); +%% Subsequent frame fragments. +websocket_data(State=#state{frag_state={nofin, _, _}}, Req, HandlerState, + 0, Len, MaskKey, Data, Rsv, 0) -> + websocket_payload(State, Req, HandlerState, + 0, Len, MaskKey, <<>>, 0, Data, Rsv); +%% Final frame fragment. +websocket_data(State=#state{frag_state={nofin, Opcode, SoFar}}, + Req, HandlerState, 0, Len, MaskKey, Data, Rsv, 1) -> + websocket_payload(State#state{frag_state={fin, Opcode, SoFar}}, + Req, HandlerState, 0, Len, MaskKey, <<>>, 0, Data, Rsv); +%% Unfragmented frame. +websocket_data(State, Req, HandlerState, Opcode, Len, MaskKey, Data, Rsv, 1) -> + websocket_payload(State, Req, HandlerState, + Opcode, Len, MaskKey, <<>>, 0, Data, Rsv). + +-spec websocket_payload(#state{}, Req, any(), + opcode(), non_neg_integer(), mask_key(), binary(), non_neg_integer(), + binary(), rsv()) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +%% Close control frames with a payload MUST contain a valid close code. 
+websocket_payload(State, Req, HandlerState, + Opcode=8, Len, MaskKey, <<>>, 0, + << MaskedCode:2/binary, Rest/bits >>, Rsv) -> + Unmasked = << Code:16 >> = websocket_unmask(MaskedCode, MaskKey, <<>>), + if Code < 1000; Code =:= 1004; Code =:= 1005; Code =:= 1006; + (Code > 1011) and (Code < 3000); Code > 4999 -> + websocket_close(State, Req, HandlerState, {error, badframe}); + true -> + websocket_payload(State, Req, HandlerState, + Opcode, Len - 2, MaskKey, Unmasked, byte_size(MaskedCode), + Rest, Rsv) + end; +%% Text frames and close control frames MUST have a payload that is valid UTF-8. +websocket_payload(State=#state{utf8_state=Incomplete}, + Req, HandlerState, Opcode, Len, MaskKey, Unmasked, UnmaskedLen, + Data, Rsv) + when (byte_size(Data) < Len) andalso ((Opcode =:= 1) orelse + ((Opcode =:= 8) andalso (Unmasked =/= <<>>))) -> + Unmasked2 = websocket_unmask(Data, + rotate_mask_key(MaskKey, UnmaskedLen), <<>>), + {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, false, State), + case is_utf8(<< Incomplete/binary, Unmasked3/binary >>) of + false -> + websocket_close(State2, Req, HandlerState, {error, badencoding}); + Utf8State -> + websocket_payload_loop(State2#state{utf8_state=Utf8State}, + Req, HandlerState, Opcode, Len - byte_size(Data), MaskKey, + << Unmasked/binary, Unmasked3/binary >>, + UnmaskedLen + byte_size(Data), Rsv) + end; +websocket_payload(State=#state{utf8_state=Incomplete}, + Req, HandlerState, Opcode, Len, MaskKey, Unmasked, UnmaskedLen, + Data, Rsv) + when Opcode =:= 1; (Opcode =:= 8) and (Unmasked =/= <<>>) -> + << End:Len/binary, Rest/bits >> = Data, + Unmasked2 = websocket_unmask(End, + rotate_mask_key(MaskKey, UnmaskedLen), <<>>), + {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, true, State), + case is_utf8(<< Incomplete/binary, Unmasked3/binary >>) of + <<>> -> + websocket_dispatch(State2#state{utf8_state= <<>>}, + Req, HandlerState, Rest, Opcode, + << Unmasked/binary, Unmasked3/binary >>); + _ -> + 
websocket_close(State2, Req, HandlerState, {error, badencoding}) + end; +%% Fragmented text frames may cut payload in the middle of UTF-8 codepoints. +websocket_payload(State=#state{frag_state={_, 1, _}, utf8_state=Incomplete}, + Req, HandlerState, Opcode=0, Len, MaskKey, Unmasked, UnmaskedLen, + Data, Rsv) + when byte_size(Data) < Len -> + Unmasked2 = websocket_unmask(Data, + rotate_mask_key(MaskKey, UnmaskedLen), <<>>), + {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, false, State), + case is_utf8(<< Incomplete/binary, Unmasked3/binary >>) of + false -> + websocket_close(State2, Req, HandlerState, {error, badencoding}); + Utf8State -> + websocket_payload_loop(State2#state{utf8_state=Utf8State}, + Req, HandlerState, Opcode, Len - byte_size(Data), MaskKey, + << Unmasked/binary, Unmasked3/binary >>, + UnmaskedLen + byte_size(Data), Rsv) + end; +websocket_payload(State=#state{frag_state={Fin, 1, _}, utf8_state=Incomplete}, + Req, HandlerState, Opcode=0, Len, MaskKey, Unmasked, UnmaskedLen, + Data, Rsv) -> + << End:Len/binary, Rest/bits >> = Data, + Unmasked2 = websocket_unmask(End, + rotate_mask_key(MaskKey, UnmaskedLen), <<>>), + {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, Fin =:= fin, State), + case is_utf8(<< Incomplete/binary, Unmasked3/binary >>) of + <<>> -> + websocket_dispatch(State2#state{utf8_state= <<>>}, + Req, HandlerState, Rest, Opcode, + << Unmasked/binary, Unmasked3/binary >>); + Utf8State when is_binary(Utf8State), Fin =:= nofin -> + websocket_dispatch(State2#state{utf8_state=Utf8State}, + Req, HandlerState, Rest, Opcode, + << Unmasked/binary, Unmasked3/binary >>); + _ -> + websocket_close(State, Req, HandlerState, {error, badencoding}) + end; +%% Other frames have a binary payload. 
+websocket_payload(State, Req, HandlerState, + Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Data, Rsv) + when byte_size(Data) < Len -> + Unmasked2 = websocket_unmask(Data, + rotate_mask_key(MaskKey, UnmaskedLen), <<>>), + {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, false, State), + websocket_payload_loop(State2, Req, HandlerState, + Opcode, Len - byte_size(Data), MaskKey, + << Unmasked/binary, Unmasked3/binary >>, UnmaskedLen + byte_size(Data), + Rsv); +websocket_payload(State, Req, HandlerState, + Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Data, Rsv) -> + << End:Len/binary, Rest/bits >> = Data, + Unmasked2 = websocket_unmask(End, + rotate_mask_key(MaskKey, UnmaskedLen), <<>>), + {Unmasked3, State2} = websocket_inflate_frame(Unmasked2, Rsv, true, State), + websocket_dispatch(State2, Req, HandlerState, Rest, Opcode, + << Unmasked/binary, Unmasked3/binary >>). + +-spec websocket_inflate_frame(binary(), rsv(), boolean(), #state{}) -> + {binary(), #state{}}. +websocket_inflate_frame(Data, << Rsv1:1, _:2 >>, _, + #state{deflate_frame = DeflateFrame} = State) + when DeflateFrame =:= false orelse Rsv1 =:= 0 -> + {Data, State}; +websocket_inflate_frame(Data, << 1:1, _:2 >>, false, State) -> + Result = zlib:inflate(State#state.inflate_state, Data), + {iolist_to_binary(Result), State}; +websocket_inflate_frame(Data, << 1:1, _:2 >>, true, State) -> + Result = zlib:inflate(State#state.inflate_state, + << Data/binary, 0:8, 0:8, 255:8, 255:8 >>), + {iolist_to_binary(Result), State}. + +-spec websocket_unmask(B, mask_key(), B) -> B when B::binary(). 
+websocket_unmask(<<>>, _, Unmasked) -> + Unmasked; +websocket_unmask(<< O:32, Rest/bits >>, MaskKey, Acc) -> + T = O bxor MaskKey, + websocket_unmask(Rest, MaskKey, << Acc/binary, T:32 >>); +websocket_unmask(<< O:24 >>, MaskKey, Acc) -> + << MaskKey2:24, _:8 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:24 >>; +websocket_unmask(<< O:16 >>, MaskKey, Acc) -> + << MaskKey2:16, _:16 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:16 >>; +websocket_unmask(<< O:8 >>, MaskKey, Acc) -> + << MaskKey2:8, _:24 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:8 >>. + +%% Because we unmask on the fly we need to continue from the right mask byte. +-spec rotate_mask_key(mask_key(), non_neg_integer()) -> mask_key(). +rotate_mask_key(MaskKey, UnmaskedLen) -> + Left = UnmaskedLen rem 4, + Right = 4 - Left, + (MaskKey bsl (Left * 8)) + (MaskKey bsr (Right * 8)). + +%% Returns <<>> if the argument is valid UTF-8, false if not, +%% or the incomplete part of the argument if we need more data. +-spec is_utf8(binary()) -> false | binary(). +is_utf8(Valid = <<>>) -> + Valid; +is_utf8(<< _/utf8, Rest/binary >>) -> + is_utf8(Rest); +%% 2 bytes. Codepages C0 and C1 are invalid; fail early. +is_utf8(<< 2#1100000:7, _/bits >>) -> + false; +is_utf8(Incomplete = << 2#110:3, _:5 >>) -> + Incomplete; +%% 3 bytes. +is_utf8(Incomplete = << 2#1110:4, _:4 >>) -> + Incomplete; +is_utf8(Incomplete = << 2#1110:4, _:4, 2#10:2, _:6 >>) -> + Incomplete; +%% 4 bytes. Codepage F4 may have invalid values greater than 0x10FFFF. +is_utf8(<< 2#11110100:8, 2#10:2, High:6, _/bits >>) when High >= 2#10000 -> + false; +is_utf8(Incomplete = << 2#11110:5, _:3 >>) -> + Incomplete; +is_utf8(Incomplete = << 2#11110:5, _:3, 2#10:2, _:6 >>) -> + Incomplete; +is_utf8(Incomplete = << 2#11110:5, _:3, 2#10:2, _:6, 2#10:2, _:6 >>) -> + Incomplete; +%% Invalid. +is_utf8(_) -> + false. 
+ +-spec websocket_payload_loop(#state{}, Req, any(), + opcode(), non_neg_integer(), mask_key(), binary(), + non_neg_integer(), rsv()) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +websocket_payload_loop(State=#state{socket=Socket, transport=Transport, + messages={OK, Closed, Error}, timeout_ref=TRef}, + Req, HandlerState, Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Rsv) -> + Transport:setopts(Socket, [{active, once}]), + receive + {OK, Socket, Data} -> + State2 = handler_loop_timeout(State), + websocket_payload(State2, Req, HandlerState, + Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Data, Rsv); + {Closed, Socket} -> + handler_terminate(State, Req, HandlerState, {error, closed}); + {Error, Socket, Reason} -> + handler_terminate(State, Req, HandlerState, {error, Reason}); + {timeout, TRef, ?MODULE} -> + websocket_close(State, Req, HandlerState, {normal, timeout}); + {timeout, OlderTRef, ?MODULE} when is_reference(OlderTRef) -> + websocket_payload_loop(State, Req, HandlerState, + Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Rsv); + Message -> + handler_call(State, Req, HandlerState, + <<>>, websocket_info, Message, + fun (State2, Req2, HandlerState2, _) -> + websocket_payload_loop(State2, Req2, HandlerState2, + Opcode, Len, MaskKey, Unmasked, UnmaskedLen, Rsv) + end) + end. + +-spec websocket_dispatch(#state{}, Req, any(), binary(), opcode(), binary()) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). +%% Continuation frame. +websocket_dispatch(State=#state{frag_state={nofin, Opcode, SoFar}}, + Req, HandlerState, RemainingData, 0, Payload) -> + websocket_data(State#state{frag_state={nofin, Opcode, + << SoFar/binary, Payload/binary >>}}, Req, HandlerState, RemainingData); +%% Last continuation frame. 
+websocket_dispatch(State=#state{frag_state={fin, Opcode, SoFar}}, + Req, HandlerState, RemainingData, 0, Payload) -> + websocket_dispatch(State#state{frag_state=undefined}, Req, HandlerState, + RemainingData, Opcode, << SoFar/binary, Payload/binary >>); +%% Text frame. +websocket_dispatch(State, Req, HandlerState, RemainingData, 1, Payload) -> + handler_call(State, Req, HandlerState, RemainingData, + websocket_handle, {text, Payload}, fun websocket_data/4); +%% Binary frame. +websocket_dispatch(State, Req, HandlerState, RemainingData, 2, Payload) -> + handler_call(State, Req, HandlerState, RemainingData, + websocket_handle, {binary, Payload}, fun websocket_data/4); +%% Close control frame. +websocket_dispatch(State, Req, HandlerState, _RemainingData, 8, <<>>) -> + websocket_close(State, Req, HandlerState, {remote, closed}); +websocket_dispatch(State, Req, HandlerState, _RemainingData, 8, + << Code:16, Payload/bits >>) -> + websocket_close(State, Req, HandlerState, {remote, Code, Payload}); +%% Ping control frame. Send a pong back and forward the ping to the handler. +websocket_dispatch(State=#state{socket=Socket, transport=Transport}, + Req, HandlerState, RemainingData, 9, Payload) -> + Len = payload_length_to_binary(byte_size(Payload)), + Transport:send(Socket, << 1:1, 0:3, 10:4, 0:1, Len/bits, Payload/binary >>), + handler_call(State, Req, HandlerState, RemainingData, + websocket_handle, {ping, Payload}, fun websocket_data/4); +%% Pong control frame. +websocket_dispatch(State, Req, HandlerState, RemainingData, 10, Payload) -> + handler_call(State, Req, HandlerState, RemainingData, + websocket_handle, {pong, Payload}, fun websocket_data/4). + +-spec handler_call(#state{}, Req, any(), binary(), atom(), any(), fun()) + -> {ok, Req, cowboy_middleware:env()} + | {suspend, module(), atom(), [any()]} + when Req::cowboy_req:req(). 
+handler_call(State=#state{handler=Handler}, Req, HandlerState, + RemainingData, Callback, Message, NextState) -> + try Handler:Callback(Message, Req, HandlerState) of + {ok, Req2, HandlerState2} -> + NextState(State, Req2, HandlerState2, RemainingData); + {ok, Req2, HandlerState2, hibernate} -> + NextState(State#state{hibernate=true}, + Req2, HandlerState2, RemainingData); + {reply, Payload, Req2, HandlerState2} + when is_list(Payload) -> + case websocket_send_many(Payload, State) of + {ok, State2} -> + NextState(State2, Req2, HandlerState2, RemainingData); + {shutdown, State2} -> + handler_terminate(State2, Req2, HandlerState2, + {normal, shutdown}); + {{error, _} = Error, State2} -> + handler_terminate(State2, Req2, HandlerState2, Error) + end; + {reply, Payload, Req2, HandlerState2, hibernate} + when is_list(Payload) -> + case websocket_send_many(Payload, State) of + {ok, State2} -> + NextState(State2#state{hibernate=true}, + Req2, HandlerState2, RemainingData); + {shutdown, State2} -> + handler_terminate(State2, Req2, HandlerState2, + {normal, shutdown}); + {{error, _} = Error, State2} -> + handler_terminate(State2, Req2, HandlerState2, Error) + end; + {reply, Payload, Req2, HandlerState2} -> + case websocket_send(Payload, State) of + {ok, State2} -> + NextState(State2, Req2, HandlerState2, RemainingData); + {shutdown, State2} -> + handler_terminate(State2, Req2, HandlerState2, + {normal, shutdown}); + {{error, _} = Error, State2} -> + handler_terminate(State2, Req2, HandlerState2, Error) + end; + {reply, Payload, Req2, HandlerState2, hibernate} -> + case websocket_send(Payload, State) of + {ok, State2} -> + NextState(State2#state{hibernate=true}, + Req2, HandlerState2, RemainingData); + {shutdown, State2} -> + handler_terminate(State2, Req2, HandlerState2, + {normal, shutdown}); + {{error, _} = Error, State2} -> + handler_terminate(State2, Req2, HandlerState2, Error) + end; + {shutdown, Req2, HandlerState2} -> + websocket_close(State, Req2, HandlerState2, 
{normal, shutdown}) + catch Class:Reason -> + _ = websocket_close(State, Req, HandlerState, {error, handler}), + erlang:Class([ + {reason, Reason}, + {mfa, {Handler, Callback, 3}}, + {stacktrace, erlang:get_stacktrace()}, + {msg, Message}, + {req, cowboy_req:to_list(Req)}, + {state, HandlerState} + ]) + end. + +websocket_opcode(text) -> 1; +websocket_opcode(binary) -> 2; +websocket_opcode(close) -> 8; +websocket_opcode(ping) -> 9; +websocket_opcode(pong) -> 10. + +-spec websocket_deflate_frame(opcode(), binary(), #state{}) -> + {binary(), rsv(), #state{}}. +websocket_deflate_frame(Opcode, Payload, + State=#state{deflate_frame = DeflateFrame}) + when DeflateFrame =:= false orelse Opcode >= 8 -> + {Payload, << 0:3 >>, State}; +websocket_deflate_frame(_, Payload, State=#state{deflate_state = Deflate}) -> + Deflated = iolist_to_binary(zlib:deflate(Deflate, Payload, sync)), + DeflatedBodyLength = erlang:size(Deflated) - 4, + Deflated1 = case Deflated of + << Body:DeflatedBodyLength/binary, 0:8, 0:8, 255:8, 255:8 >> -> Body; + _ -> Deflated + end, + {Deflated1, << 1:1, 0:2 >>, State}. + +-spec websocket_send(frame(), #state{}) +-> {ok, #state{}} | {shutdown, #state{}} | {{error, atom()}, #state{}}. 
+websocket_send(Type, State=#state{socket=Socket, transport=Transport}) + when Type =:= close -> + Opcode = websocket_opcode(Type), + case Transport:send(Socket, << 1:1, 0:3, Opcode:4, 0:8 >>) of + ok -> {shutdown, State}; + Error -> {Error, State} + end; +websocket_send(Type, State=#state{socket=Socket, transport=Transport}) + when Type =:= ping; Type =:= pong -> + Opcode = websocket_opcode(Type), + {Transport:send(Socket, << 1:1, 0:3, Opcode:4, 0:8 >>), State}; +websocket_send({close, Payload}, State) -> + websocket_send({close, 1000, Payload}, State); +websocket_send({Type = close, StatusCode, Payload}, State=#state{ + socket=Socket, transport=Transport}) -> + Opcode = websocket_opcode(Type), + Len = 2 + iolist_size(Payload), + %% Control packets must not be > 125 in length. + true = Len =< 125, + BinLen = payload_length_to_binary(Len), + Transport:send(Socket, + [<< 1:1, 0:3, Opcode:4, 0:1, BinLen/bits, StatusCode:16 >>, Payload]), + {shutdown, State}; +websocket_send({Type, Payload0}, State=#state{socket=Socket, transport=Transport}) -> + Opcode = websocket_opcode(Type), + {Payload, Rsv, State2} = websocket_deflate_frame(Opcode, iolist_to_binary(Payload0), State), + Len = iolist_size(Payload), + %% Control packets must not be > 125 in length. + true = if Type =:= ping; Type =:= pong -> + Len =< 125; + true -> + true + end, + BinLen = payload_length_to_binary(Len), + {Transport:send(Socket, + [<< 1:1, Rsv/bits, Opcode:4, 0:1, BinLen/bits >>, Payload]), State2}. + +-spec websocket_send_many([frame()], #state{}) + -> {ok, #state{}} | {shutdown, #state{}} | {{error, atom()}, #state{}}. +websocket_send_many([], State) -> + {ok, State}; +websocket_send_many([Frame|Tail], State) -> + case websocket_send(Frame, State) of + {ok, State2} -> websocket_send_many(Tail, State2); + {shutdown, State2} -> {shutdown, State2}; + {Error, State2} -> {Error, State2} + end. 
+ +-spec websocket_close(#state{}, Req, any(), terminate_reason()) + -> {ok, Req, cowboy_middleware:env()} + when Req::cowboy_req:req(). +websocket_close(State=#state{socket=Socket, transport=Transport}, + Req, HandlerState, Reason) -> + case Reason of + {normal, _} -> + Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, 1000:16 >>); + {error, badframe} -> + Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, 1002:16 >>); + {error, badencoding} -> + Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, 1007:16 >>); + {error, handler} -> + Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, 1011:16 >>); + {remote, closed} -> + Transport:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>); + {remote, Code, _} -> + Transport:send(Socket, << 1:1, 0:3, 8:4, 0:1, 2:7, Code:16 >>) + end, + handler_terminate(State, Req, HandlerState, Reason). + +-spec handler_terminate(#state{}, Req, any(), terminate_reason()) + -> {ok, Req, cowboy_middleware:env()} + when Req::cowboy_req:req(). +handler_terminate(#state{env=Env, handler=Handler}, + Req, HandlerState, TerminateReason) -> + try + Handler:websocket_terminate(TerminateReason, Req, HandlerState) + catch Class:Reason -> + erlang:Class([ + {reason, Reason}, + {mfa, {Handler, websocket_terminate, 3}}, + {stacktrace, erlang:get_stacktrace()}, + {req, cowboy_req:to_list(Req)}, + {state, HandlerState}, + {terminate_reason, TerminateReason} + ]) + end, + {ok, Req, [{result, closed}|Env]}. + +-spec payload_length_to_binary(0..16#7fffffffffffffff) + -> << _:7 >> | << _:23 >> | << _:71 >>. +payload_length_to_binary(N) -> + case N of + N when N =< 125 -> << N:7 >>; + N when N =< 16#ffff -> << 126:7, N:16 >>; + N when N =< 16#7fffffffffffffff -> << 127:7, N:64 >> + end. 
diff --git a/rabbitmq-server/deps/cowboy/src/cowboy_websocket_handler.erl b/rabbitmq-server/deps/cowboy/src/cowboy_websocket_handler.erl new file mode 100644 index 0000000..177e5f6 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/src/cowboy_websocket_handler.erl @@ -0,0 +1,50 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_websocket_handler). + +-type opts() :: any(). +-type state() :: any(). +-type terminate_reason() :: {normal, shutdown} + | {normal, timeout} + | {error, closed} + | {remote, closed} + | {remote, cowboy_websocket:close_code(), binary()} + | {error, badencoding} + | {error, badframe} + | {error, atom()}. + +-callback websocket_init(atom(), Req, opts()) + -> {ok, Req, state()} + | {ok, Req, state(), hibernate} + | {ok, Req, state(), timeout()} + | {ok, Req, state(), timeout(), hibernate} + | {shutdown, Req} + when Req::cowboy_req:req(). +-callback websocket_handle({text | binary | ping | pong, binary()}, Req, State) + -> {ok, Req, State} + | {ok, Req, State, hibernate} + | {reply, cowboy_websocket:frame() | [cowboy_websocket:frame()], Req, State} + | {reply, cowboy_websocket:frame() | [cowboy_websocket:frame()], Req, State, hibernate} + | {shutdown, Req, State} + when Req::cowboy_req:req(), State::state(). 
+-callback websocket_info(any(), Req, State) + -> {ok, Req, State} + | {ok, Req, State, hibernate} + | {reply, cowboy_websocket:frame() | [cowboy_websocket:frame()], Req, State} + | {reply, cowboy_websocket:frame() | [cowboy_websocket:frame()], Req, State, hibernate} + | {shutdown, Req, State} + when Req::cowboy_req:req(), State::state(). +-callback websocket_terminate(terminate_reason(), cowboy_req:req(), state()) + -> ok. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/cover.spec b/rabbitmq-server/deps/cowboy/test/cover.spec similarity index 100% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/cover.spec rename to rabbitmq-server/deps/cowboy/test/cover.spec diff --git a/rabbitmq-server/deps/cowboy/test/cowboy_ct_hook.erl b/rabbitmq-server/deps/cowboy/test/cowboy_ct_hook.erl new file mode 100644 index 0000000..1586412 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/cowboy_ct_hook.erl @@ -0,0 +1,23 @@ +%% Copyright (c) 2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_ct_hook). + +-export([init/2]). + +init(_, _) -> + cowboy_test:start([cowboy, gun]), + cowboy_test:make_certs(), + error_logger:add_report_handler(cowboy_error_h), + {ok, undefined}. 
diff --git a/rabbitmq-server/deps/cowboy/test/cowboy_error_h.erl b/rabbitmq-server/deps/cowboy/test/cowboy_error_h.erl new file mode 100644 index 0000000..b4ae78f --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/cowboy_error_h.erl @@ -0,0 +1,145 @@ +%% Copyright (c) 2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_error_h). +-behaviour(gen_event). + +%% Public interface. +-export([ignore/3]). + +%% gen_event. +-export([init/1]). +-export([handle_event/2]). +-export([handle_call/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +%% Public interface. + +%% Ignore crashes from Pid occurring in M:F/A. +ignore(M, F, A) -> + gen_event:call(error_logger, ?MODULE, {expect, {self(), M, F, A}}). + +%% gen_event. + +init(_) -> + spawn(fun() -> error_logger:tty(false) end), + {ok, []}. + +%% Ignore supervisor and progress reports. +handle_event({info_report, _, {_, progress, _}}, State) -> + {ok, State}; +handle_event({info_report, _, {_, std_info, _}}, State) -> + {ok, State}; +handle_event({error_report, _, {_, supervisor_report, _}}, State) -> + {ok, State}; +%% Ignore gun retry failures. 
+handle_event({error_report, _, {_, crash_report, + [[{initial_call, {gun, init, _}}, _, _, + {error_info, {error, gone, _}}|_]|_]}}, + State) -> + {ok, State}; +%% Ignore emulator reports that are a duplicate of what Ranch gives us. +%% +%% The emulator always sends strings for errors, which makes it very +%% difficult to extract the information we need, hence the regexps. +handle_event(Event = {error, GL, {emulator, _, Msg}}, State) + when node(GL) =:= node() -> + Result = re:run(Msg, + "Error in process ([^\s]+).+? with exit value: " + ".+?{stacktrace,\\[{([^,]+),([^,]+),(.+)", + [{capture, all_but_first, list}]), + case Result of + nomatch -> + write_event(Event), + {ok, State}; + {match, [PidStr, MStr, FStr, Rest]} -> + A = case Rest of + "[]" ++ _ -> + 0; + "[" ++ Rest2 -> + count_args(Rest2, 1, 0); + _ -> + {match, [AStr]} = re:run(Rest, "([^,]+).+", + [{capture, all_but_first, list}]), + list_to_integer(AStr) + end, + Crash = {list_to_pid(PidStr), list_to_existing_atom(MStr), + list_to_existing_atom(FStr), A}, + case lists:member(Crash, State) of + true -> + {ok, lists:delete(Crash, State)}; + false -> + write_event(Event), + {ok, State} + end + end; +handle_event(Event = {error, GL, + {_, "Ranch listener" ++ _, [_, _, Pid, {[_, _, + {stacktrace, [{M, F, A, _}|_]}|_], _}]}}, + State) when node(GL) =:= node() -> + A2 = if is_list(A) -> length(A); true -> A end, + Crash = {Pid, M, F, A2}, + case lists:member(Crash, State) of + true -> + {ok, lists:delete(Crash, State)}; + false -> + write_event(Event), + {ok, State} + end; +handle_event(Event = {_, GL, _}, State) when node(GL) =:= node() -> + write_event(Event), + {ok, State}; +handle_event(_, State) -> + {ok, State}. + +handle_call({expect, Crash}, State) -> + {ok, ok, [Crash, Crash|State]}; +handle_call(_, State) -> + {ok, {error, bad_query}, State}. + +handle_info(_, State) -> + {ok, State}. + +terminate(_, _) -> + spawn(fun() -> error_logger:tty(true) end), + ok. 
+ +code_change(_, State, _) -> + {ok, State}. + +%% Internal. + +write_event(Event) -> + error_logger_tty_h:write_event( + {erlang:universaltime(), Event}, + io). + +count_args("]" ++ _, N, 0) -> + N; +count_args("]" ++ Tail, N, Levels) -> + count_args(Tail, N, Levels - 1); +count_args("[" ++ Tail, N, Levels) -> + count_args(Tail, N, Levels + 1); +count_args("}" ++ Tail, N, Levels) -> + count_args(Tail, N, Levels - 1); +count_args("{" ++ Tail, N, Levels) -> + count_args(Tail, N, Levels + 1); +count_args("," ++ Tail, N, Levels = 0) -> + count_args(Tail, N + 1, Levels); +count_args("," ++ Tail, N, Levels) -> + count_args(Tail, N, Levels); +count_args([_|Tail], N, Levels) -> + count_args(Tail, N, Levels). diff --git a/rabbitmq-server/deps/cowboy/test/cowboy_test.erl b/rabbitmq-server/deps/cowboy/test/cowboy_test.erl new file mode 100644 index 0000000..f4a5706 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/cowboy_test.erl @@ -0,0 +1,200 @@ +%% Copyright (c) 2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cowboy_test). +-compile(export_all). + +%% Start and stop applications and their dependencies. + +start(Apps) -> + _ = [do_start(App) || App <- Apps], + ok. 
+ +do_start(App) -> + case application:start(App) of + ok -> + ok; + {error, {not_started, Dep}} -> + do_start(Dep), + do_start(App) + end. + +%% SSL certificate creation and safekeeping. + +make_certs() -> + {_, Cert, Key} = ct_helper:make_certs(), + CertOpts = [{cert, Cert}, {key, Key}], + Pid = spawn(fun() -> receive after infinity -> ok end end), + ?MODULE = ets:new(?MODULE, [ordered_set, public, named_table, + {heir, Pid, undefined}]), + ets:insert(?MODULE, {cert_opts, CertOpts}), + ok. + +get_certs() -> + ets:lookup_element(?MODULE, cert_opts, 2). + +%% Quick configuration value retrieval. + +config(Key, Config) -> + {_, Value} = lists:keyfind(Key, 1, Config), + Value. + +%% Test case description. + +doc(String) -> + ct:comment(String), + ct:log(String). + +%% List of all test cases in the suite. + +all(Suite) -> + lists:usort([F || {F, 1} <- Suite:module_info(exports), + F =/= module_info, + F =/= test, %% This is leftover from the eunit parse_transform... + F =/= all, + F =/= groups, + string:substr(atom_to_list(F), 1, 5) =/= "init_", + string:substr(atom_to_list(F), 1, 4) =/= "end_", + string:substr(atom_to_list(F), 1, 3) =/= "do_" + ]). + +%% Listeners initialization. + +init_http(Ref, ProtoOpts, Config) -> + {ok, _} = cowboy:start_http(Ref, 100, [{port, 0}], [ + {max_keepalive, 50}, + {timeout, 500} + |ProtoOpts]), + Port = ranch:get_port(Ref), + [{type, tcp}, {port, Port}, {opts, []}|Config]. + +init_https(Ref, ProtoOpts, Config) -> + Opts = get_certs(), + {ok, _} = cowboy:start_https(Ref, 100, Opts ++ [{port, 0}], [ + {max_keepalive, 50}, + {timeout, 500} + |ProtoOpts]), + Port = ranch:get_port(Ref), + [{type, ssl}, {port, Port}, {opts, Opts}|Config]. + +init_spdy(Ref, ProtoOpts, Config) -> + Opts = get_certs(), + {ok, _} = cowboy:start_spdy(Ref, 100, Opts ++ [{port, 0}], + ProtoOpts), + Port = ranch:get_port(Ref), + [{type, ssl}, {port, Port}, {opts, Opts}|Config]. + +%% Common group of listeners used by most suites. 
+ +common_all() -> + [ + {group, http}, + {group, https}, + {group, spdy}, + {group, http_compress}, + {group, https_compress}, + {group, spdy_compress} + ]. + +common_groups(Tests) -> + [ + {http, [parallel], Tests}, + {https, [parallel], Tests}, + {spdy, [parallel], Tests}, + {http_compress, [parallel], Tests}, + {https_compress, [parallel], Tests}, + {spdy_compress, [parallel], Tests} + ]. + +init_common_groups(Name = http, Config, Mod) -> + init_http(Name, [ + {env, [{dispatch, Mod:init_dispatch(Config)}]} + ], Config); +init_common_groups(Name = https, Config, Mod) -> + init_https(Name, [ + {env, [{dispatch, Mod:init_dispatch(Config)}]} + ], Config); +init_common_groups(Name = spdy, Config, Mod) -> + init_spdy(Name, [ + {env, [{dispatch, Mod:init_dispatch(Config)}]} + ], Config); +init_common_groups(Name = http_compress, Config, Mod) -> + init_http(Name, [ + {env, [{dispatch, Mod:init_dispatch(Config)}]}, + {compress, true} + ], Config); +init_common_groups(Name = https_compress, Config, Mod) -> + init_https(Name, [ + {env, [{dispatch, Mod:init_dispatch(Config)}]}, + {compress, true} + ], Config); +init_common_groups(Name = spdy_compress, Config, Mod) -> + init_spdy(Name, [ + {env, [{dispatch, Mod:init_dispatch(Config)}]}, + {compress, true} + ], Config). + +%% Support functions for testing using Gun. + +gun_open(Config) -> + gun_open(Config, []). + +gun_open(Config, Opts) -> + {ok, ConnPid} = gun:open("localhost", config(port, Config), + [{retry, 0}, {type, config(type, Config)}|Opts]), + ConnPid. + +gun_monitor_open(Config) -> + gun_monitor_open(Config, []). + +gun_monitor_open(Config, Opts) -> + ConnPid = gun_open(Config, Opts), + {ConnPid, monitor(process, ConnPid)}. + +gun_is_gone(ConnPid, MRef) -> + receive {'DOWN', MRef, process, ConnPid, gone} -> ok + after 500 -> error(timeout) end. + +%% Support functions for testing using a raw socket. 
+ +raw_open(Config) -> + Transport = case config(type, Config) of + tcp -> gen_tcp; + ssl -> ssl + end, + {_, Opts} = lists:keyfind(opts, 1, Config), + {ok, Socket} = Transport:connect("localhost", config(port, Config), + [binary, {active, false}, {packet, raw}, + {reuseaddr, true}, {nodelay, true}|Opts]), + {raw_client, Socket, Transport}. + +raw_send({raw_client, Socket, Transport}, Data) -> + Transport:send(Socket, Data). + +raw_recv_head({raw_client, Socket, Transport}) -> + {ok, Data} = Transport:recv(Socket, 0, 5000), + raw_recv_head(Socket, Transport, Data). + +raw_recv_head(Socket, Transport, Buffer) -> + case binary:match(Buffer, <<"\r\n\r\n">>) of + nomatch -> + {ok, Data} = Transport:recv(Socket, 0, 5000), + raw_recv_head(Socket, Transport, << Buffer/binary, Data/binary >>); + {_, _} -> + Buffer + end. + +raw_expect_recv({raw_client, Socket, Transport}, Expect) -> + {ok, Expect} = Transport:recv(Socket, iolist_size(Expect), 5000), + ok. diff --git a/rabbitmq-server/deps/cowboy/test/eunit_SUITE.erl b/rabbitmq-server/deps/cowboy/test/eunit_SUITE.erl new file mode 100644 index 0000000..1d817c6 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/eunit_SUITE.erl @@ -0,0 +1,22 @@ +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +-module(eunit_SUITE). +-compile(export_all). + +all() -> + [eunit]. + +eunit(_) -> + ok = eunit:test({application, cowboy}). diff --git a/rabbitmq-server/deps/cowboy/test/handlers/input_crash_h.erl b/rabbitmq-server/deps/cowboy/test/handlers/input_crash_h.erl new file mode 100644 index 0000000..668d053 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/handlers/input_crash_h.erl @@ -0,0 +1,10 @@ +%% This module crashes on request input data +%% depending on the given option. + +-module(input_crash_h). + +-export([init/3]). + +init(_, Req, content_length) -> + cowboy_error_h:ignore(cow_http_hd, number, 2), + cowboy_req:parse_header(<<"content-length">>, Req). diff --git a/rabbitmq-server/deps/cowboy/test/handlers/long_polling_h.erl b/rabbitmq-server/deps/cowboy/test/handlers/long_polling_h.erl new file mode 100644 index 0000000..21f1d4d --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/handlers/long_polling_h.erl @@ -0,0 +1,27 @@ +%% This module implements a loop handler for long-polling. +%% It starts by sending itself a message after 200ms, +%% then sends another after that for a total of 3 messages. +%% When it receives the last message, it sends a 102 reply back. + +-module(long_polling_h). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +init(_, Req, _) -> + erlang:send_after(200, self(), timeout), + {loop, Req, 2, 5000, hibernate}. + +info(timeout, Req, 0) -> + {ok, Req2} = cowboy_req:reply(102, Req), + {ok, Req2, 0}; +info(timeout, Req, Count) -> + erlang:send_after(200, self(), timeout), + {loop, Req, Count - 1, hibernate}. + +terminate({normal, shutdown}, _, 0) -> + ok; +terminate({error, overflow}, _, _) -> + ok. 
diff --git a/rabbitmq-server/deps/cowboy/test/handlers/loop_handler_body_h.erl b/rabbitmq-server/deps/cowboy/test/handlers/loop_handler_body_h.erl new file mode 100644 index 0000000..db69b02 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/handlers/loop_handler_body_h.erl @@ -0,0 +1,24 @@ +%% This module implements a loop handler that reads +%% the request body after sending itself a message, +%% checks that its size is exactly 100000 bytes, +%% then sends a 200 reply back. + +-module(loop_handler_body_h). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +init(_, Req, _) -> + self() ! timeout, + {loop, Req, undefined, 5000, hibernate}. + +info(timeout, Req, State) -> + {ok, Body, Req2} = cowboy_req:body(Req), + 100000 = byte_size(Body), + {ok, Req3} = cowboy_req:reply(200, Req2), + {ok, Req3, State}. + +terminate({normal, shutdown}, _, _) -> + ok. diff --git a/rabbitmq-server/deps/cowboy/test/handlers/loop_handler_timeout_h.erl b/rabbitmq-server/deps/cowboy/test/handlers/loop_handler_timeout_h.erl new file mode 100644 index 0000000..1125046 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/handlers/loop_handler_timeout_h.erl @@ -0,0 +1,23 @@ +%% This module implements a loop handler that sends +%% itself a timeout that will intentionally arrive +%% too late, as it configures itself to only wait +%% 200ms before closing the connection in init/3. +%% This results in a 204 reply being sent back by Cowboy. + +-module(loop_handler_timeout_h). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +init(_, Req, _) -> + erlang:send_after(1000, self(), timeout), + {loop, Req, undefined, 200, hibernate}. + +info(timeout, Req, State) -> + {ok, Req2} = cowboy_req:reply(500, Req), + {ok, Req2, State}. + +terminate({normal, timeout}, _, _) -> + ok. 
diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE.erl new file mode 100644 index 0000000..af3a453 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE.erl @@ -0,0 +1,1111 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% Copyright (c) 2011, Anthony Ramine +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(http_SUITE). +-compile(export_all). + +-import(cowboy_test, [config/2]). +-import(cowboy_test, [gun_open/1]). +-import(cowboy_test, [gun_monitor_open/1]). +-import(cowboy_test, [gun_monitor_open/2]). +-import(cowboy_test, [gun_is_gone/2]). +-import(cowboy_test, [raw_open/1]). +-import(cowboy_test, [raw_send/2]). +-import(cowboy_test, [raw_recv_head/1]). +-import(cowboy_test, [raw_expect_recv/2]). + +%% ct. + +all() -> + [ + {group, http}, + {group, https}, + {group, http_compress}, + {group, https_compress}, + {group, onrequest}, + {group, onresponse}, + {group, onresponse_capitalize}, + {group, parse_host}, + {group, set_env} + ]. 
+ +groups() -> + Tests = cowboy_test:all(?MODULE) -- [ + onrequest, onrequest_reply, onrequest_hook, + onresponse_crash, onresponse_reply, onresponse_capitalize, + parse_host, set_env_dispatch + ], + [ + {http, [parallel], Tests}, + {https, [parallel], Tests}, + {http_compress, [parallel], Tests}, + {https_compress, [parallel], Tests}, + {onrequest, [parallel], [ + onrequest, + onrequest_reply + ]}, + {onresponse, [parallel], [ + onresponse_crash, + onresponse_reply + ]}, + {onresponse_capitalize, [parallel], [ + onresponse_capitalize + ]}, + {parse_host, [], [ + parse_host + ]}, + {set_env, [], [ + set_env_dispatch + ]} + ]. + +init_per_suite(Config) -> + Dir = config(priv_dir, Config) ++ "/static", + ct_helper:create_static_dir(Dir), + [{static_dir, Dir}|Config]. + +end_per_suite(Config) -> + ct_helper:delete_static_dir(config(static_dir, Config)). + +init_per_group(Name = http, Config) -> + cowboy_test:init_http(Name, [ + {env, [{dispatch, init_dispatch(Config)}]} + ], Config); +init_per_group(Name = https, Config) -> + cowboy_test:init_https(Name, [ + {env, [{dispatch, init_dispatch(Config)}]} + ], Config); +init_per_group(Name = http_compress, Config) -> + cowboy_test:init_http(Name, [ + {env, [{dispatch, init_dispatch(Config)}]}, + {compress, true} + ], Config); +init_per_group(Name = https_compress, Config) -> + cowboy_test:init_https(Name, [ + {env, [{dispatch, init_dispatch(Config)}]}, + {compress, true} + ], Config); +%% Most, if not all of these, should be in separate test suites. 
+init_per_group(onrequest, Config) -> + {ok, _} = cowboy:start_http(onrequest, 100, [{port, 0}], [ + {env, [{dispatch, init_dispatch(Config)}]}, + {max_keepalive, 50}, + {onrequest, fun do_onrequest_hook/1}, + {timeout, 500} + ]), + Port = ranch:get_port(onrequest), + [{type, tcp}, {port, Port}, {opts, []}|Config]; +init_per_group(onresponse, Config) -> + {ok, _} = cowboy:start_http(onresponse, 100, [{port, 0}], [ + {env, [{dispatch, init_dispatch(Config)}]}, + {max_keepalive, 50}, + {onresponse, fun do_onresponse_hook/4}, + {timeout, 500} + ]), + Port = ranch:get_port(onresponse), + [{type, tcp}, {port, Port}, {opts, []}|Config]; +init_per_group(onresponse_capitalize, Config) -> + {ok, _} = cowboy:start_http(onresponse_capitalize, 100, [{port, 0}], [ + {env, [{dispatch, init_dispatch(Config)}]}, + {max_keepalive, 50}, + {onresponse, fun do_onresponse_capitalize_hook/4}, + {timeout, 500} + ]), + Port = ranch:get_port(onresponse_capitalize), + [{type, tcp}, {port, Port}, {opts, []}|Config]; +init_per_group(parse_host, Config) -> + Dispatch = cowboy_router:compile([ + {'_', [ + {"/req_attr", http_req_attr, []} + ]} + ]), + {ok, _} = cowboy:start_http(parse_host, 100, [{port, 0}], [ + {env, [{dispatch, Dispatch}]}, + {max_keepalive, 50}, + {timeout, 500} + ]), + Port = ranch:get_port(parse_host), + [{type, tcp}, {port, Port}, {opts, []}|Config]; +init_per_group(set_env, Config) -> + {ok, _} = cowboy:start_http(set_env, 100, [{port, 0}], [ + {env, [{dispatch, []}]}, + {max_keepalive, 50}, + {timeout, 500} + ]), + Port = ranch:get_port(set_env), + [{type, tcp}, {port, Port}, {opts, []}|Config]. + +end_per_group(Name, _) -> + ok = cowboy:stop_listener(Name). + +%% Dispatch configuration. 
+ +init_dispatch(Config) -> + cowboy_router:compile([ + {"localhost", [ + {"/chunked_response", http_chunked, []}, + {"/streamed_response", http_streamed, []}, + {"/init_shutdown", http_init_shutdown, []}, + {"/headers/dupe", http_handler, + [{headers, [{<<"connection">>, <<"close">>}]}]}, + {"/set_resp/header", http_set_resp, + [{headers, [{<<"vary">>, <<"Accept">>}]}]}, + {"/set_resp/overwrite", http_set_resp, + [{headers, [{<<"server">>, <<"DesireDrive/1.0">>}]}]}, + {"/set_resp/body", http_set_resp, + [{body, <<"A flameless dance does not equal a cycle">>}]}, + {"/stream_body/set_resp", http_stream_body, + [{reply, set_resp}, {body, <<"stream_body_set_resp">>}]}, + {"/stream_body/set_resp_close", + http_stream_body, [ + {reply, set_resp_close}, + {body, <<"stream_body_set_resp_close">>}]}, + {"/stream_body/set_resp_chunked", + http_stream_body, [ + {reply, set_resp_chunked}, + {body, [<<"stream_body">>, <<"_set_resp_chunked">>]}]}, + {"/static/[...]", cowboy_static, + {dir, config(static_dir, Config)}}, + {"/static_mimetypes_function/[...]", cowboy_static, + {dir, config(static_dir, Config), + [{mimetypes, ?MODULE, do_mimetypes_text_html}]}}, + {"/handler_errors", http_errors, []}, + {"/static_attribute_etag/[...]", cowboy_static, + {dir, config(static_dir, Config)}}, + {"/static_function_etag/[...]", cowboy_static, + {dir, config(static_dir, Config), + [{etag, ?MODULE, do_etag_gen}]}}, + {"/static_specify_file/[...]", cowboy_static, + {file, config(static_dir, Config) ++ "/style.css"}}, + {"/multipart", http_multipart, []}, + {"/multipart/large", http_multipart_stream, []}, + {"/echo/body", http_echo_body, []}, + {"/echo/body_qs", http_body_qs, []}, + {"/crash/content-length", input_crash_h, content_length}, + {"/param_all", rest_param_all, []}, + {"/bad_accept", rest_simple_resource, []}, + {"/bad_content_type", rest_patch_resource, []}, + {"/simple", rest_simple_resource, []}, + {"/forbidden_post", rest_forbidden_resource, [true]}, + {"/simple_post", 
rest_forbidden_resource, [false]}, + {"/missing_get_callbacks", rest_missing_callbacks, []}, + {"/missing_put_callbacks", rest_missing_callbacks, []}, + {"/nodelete", rest_nodelete_resource, []}, + {"/post_charset", rest_post_charset_resource, []}, + {"/postonly", rest_postonly_resource, []}, + {"/patch", rest_patch_resource, []}, + {"/resetags", rest_resource_etags, []}, + {"/rest_expires", rest_expires, []}, + {"/rest_expires_binary", rest_expires_binary, []}, + {"/rest_empty_resource", rest_empty_resource, []}, + {"/loop_stream_recv", http_loop_stream_recv, []}, + {"/", http_handler, []} + ]} + ]). + +%% Callbacks. + +do_etag_gen(_, _, _) -> + {strong, <<"etag">>}. + +do_mimetypes_text_html(_) -> + <<"text/html">>. + +%% Convenience functions. + +do_raw(Data, Config) -> + Client = raw_open(Config), + ok = raw_send(Client, Data), + case catch raw_recv_head(Client) of + {'EXIT', _} -> closed; + Resp -> element(2, cow_http:parse_status_line(Resp)) + end. + +do_get(Path, Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, Path), + {response, _, Status, _} = gun:await(ConnPid, Ref), + gun:close(ConnPid), + Status. + +%% Tests. + +check_raw_status(Config) -> + Huge = [$0 || _ <- lists:seq(1, 5000)], + HugeCookie = lists:flatten(["whatever_man_biiiiiiiiiiiig_cookie_me_want_77=" + "Wed Apr 06 2011 10:38:52 GMT-0500 (CDT)" || _ <- lists:seq(1, 40)]), + ResponsePacket = +"HTTP/1.0 302 Found\r +Location: http://www.google.co.il/\r +Cache-Control: private\r +Content-Type: text/html; charset=UTF-8\r +Set-Cookie: PREF=ID=568f67013d4a7afa:FF=0:TM=1323014101:LM=1323014101:S=XqctDWC65MzKT0zC; expires=Tue, 03-Dec-2013 15:55:01 GMT; path=/; domain=.google.com\r +Date: Sun, 04 Dec 2011 15:55:01 GMT\r +Server: gws\r +Content-Length: 221\r +X-XSS-Protection: 1; mode=block\r +X-Frame-Options: SAMEORIGIN\r +\r + +302 Moved +

302 Moved

+The document has moved +here. +", + Tests = [ + {200, ["GET / HTTP/1.0\r\nHost: localhost\r\n" + "Set-Cookie: ", HugeCookie, "\r\n\r\n"]}, + {200, "\r\n\r\n\r\n\r\n\r\nGET / HTTP/1.1\r\nHost: localhost\r\n\r\n"}, + {200, "GET http://proxy/ HTTP/1.1\r\nHost: localhost\r\n\r\n"}, + {400, "\n"}, + {400, "Garbage\r\n\r\n"}, + {400, "\r\n\r\n\r\n\r\n\r\n\r\n"}, + {400, "GET / HTTP/1.1\r\nHost: ninenines.eu\r\n\r\n"}, + {400, "GET http://proxy/ HTTP/1.1\r\n\r\n"}, + {400, "GET / HTTP/1.1\r\nHost: localhost:bad_port\r\n\r\n"}, + {400, ["POST /crash/content-length HTTP/1.1\r\nHost: localhost\r\nContent-Length: 5000,5000\r\n\r\n", Huge]}, + {505, ResponsePacket}, + {408, "GET / HTTP/1.1\r\n"}, + {408, "GET / HTTP/1.1\r\nHost: localhost"}, + {408, "GET / HTTP/1.1\r\nHost: localhost\r\n"}, + {408, "GET / HTTP/1.1\r\nHost: localhost\r\n\r"}, + {414, Huge}, + {400, "GET / HTTP/1.1\r\n" ++ Huge}, + {505, "GET / HTTP/1.2\r\nHost: localhost\r\n\r\n"}, + {closed, ""}, + {closed, "\r\n"}, + {closed, "\r\n\r\n"}, + {closed, "GET / HTTP/1.1"} + ], + _ = [{Status, Packet} = begin + Ret = do_raw(Packet, Config), + {Ret, Packet} + end || {Status, Packet} <- Tests], + ok. + +check_status(Config) -> + Tests = [ + {200, "/"}, + {200, "/simple"}, + {400, "/static/%2f"}, + {400, "/static/%2e"}, + {400, "/static/%2e%2e"}, + {403, "/static/directory"}, + {403, "/static/directory/"}, + {403, "/static/unreadable"}, + {404, "/not/found"}, + {404, "/static/not_found"}, + {500, "/handler_errors?case=handle_before_reply"}, + {500, "/handler_errors?case=init_before_reply"}, + {666, "/init_shutdown"} + ], + _ = [{Status, URL} = begin + Ret = do_get(URL, Config), + {Ret, URL} + end || {Status, URL} <- Tests]. 
+ +chunked_response(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/chunked_response"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + true = lists:keymember(<<"transfer-encoding">>, 1, Headers), + {ok, <<"chunked_handler\r\nworks fine!">>} = gun:await_body(ConnPid, Ref), + ok. + +%% Check if sending requests whose size is around the MTU breaks something. +echo_body(Config) -> + MTU = ct_helper:get_loopback_mtu(), + _ = [begin + Body = list_to_binary(lists:duplicate(Size, $a)), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body", [], Body), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, Body} = gun:await_body(ConnPid, Ref) + end || Size <- lists:seq(MTU - 500, MTU)], + ok. + +%% Check if sending request whose size is bigger than 1000000 bytes causes 413 +echo_body_max_length(Config) -> + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body", [], << 0:2000000/unit:8 >>), + {response, nofin, 413, _} = gun:await(ConnPid, Ref), + ok. + +% check if body_qs echo's back results +echo_body_qs(Config) -> + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body_qs", [], <<"echo=67890">>), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, <<"67890">>} = gun:await_body(ConnPid, Ref), + ok. + +echo_body_qs_max_length(Config) -> + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body_qs", [], << "echo=", 0:2000000/unit:8 >>), + {response, nofin, 413, _} = gun:await(ConnPid, Ref), + ok. + +error_chain_handle_after_reply(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Ref1 = gun:get(ConnPid, "/"), + Ref2 = gun:get(ConnPid, "/handler_errors?case=handle_after_reply"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref1, MRef), + {response, nofin, 200, _} = gun:await(ConnPid, Ref2, MRef), + gun_is_gone(ConnPid, MRef). 
+ +error_chain_handle_before_reply(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Ref1 = gun:get(ConnPid, "/"), + Ref2 = gun:get(ConnPid, "/handler_errors?case=handle_before_reply"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref1, MRef), + {response, fin, 500, _} = gun:await(ConnPid, Ref2, MRef), + gun_is_gone(ConnPid, MRef). + +error_handle_after_reply(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Ref = gun:get(ConnPid, "/handler_errors?case=handle_after_reply"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref, MRef), + gun_is_gone(ConnPid, MRef). + +error_init_after_reply(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Ref = gun:get(ConnPid, "/handler_errors?case=init_after_reply"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref, MRef), + gun_is_gone(ConnPid, MRef). + +error_init_reply_handle_error(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Ref = gun:get(ConnPid, "/handler_errors?case=init_reply_handle_error"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref, MRef), + gun_is_gone(ConnPid, MRef). + +headers_dupe(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Ref = gun:get(ConnPid, "/headers/dupe"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref, MRef), + %% Ensure that only one connection header was received. + [<<"close">>] = [V || {Name, V} <- Headers, Name =:= <<"connection">>], + gun_is_gone(ConnPid, MRef). + +http10_chunkless(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config, [{http, [{version, 'HTTP/1.0'}]}]), + Ref = gun:get(ConnPid, "/chunked_response"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref, MRef), + false = lists:keyfind(<<"transfer-encoding">>, 1, Headers), + {ok, <<"chunked_handler\r\nworks fine!">>} = gun:await_body(ConnPid, Ref, MRef), + gun_is_gone(ConnPid, MRef). 
+ +http10_hostless(Config) -> + Name = http10_hostless, + Port10 = config(port, Config) + 10, + Transport = case config(type, Config) of + tcp -> ranch_tcp; + ssl -> ranch_ssl + end, + ranch:start_listener(Name, 5, Transport, + config(opts, Config) ++ [{port, Port10}], + cowboy_protocol, [ + {env, [{dispatch, cowboy_router:compile([ + {'_', [{"/http1.0/hostless", http_handler, []}]}])}]}, + {max_keepalive, 50}, + {timeout, 500}] + ), + 200 = do_raw("GET /http1.0/hostless HTTP/1.0\r\n\r\n", + [{port, Port10}|Config]), + cowboy:stop_listener(http10_hostless). + +http10_keepalive_default(Config) -> + Normal = "GET / HTTP/1.0\r\nhost: localhost\r\n\r\n", + Client = raw_open(Config), + ok = raw_send(Client, Normal), + case catch raw_recv_head(Client) of + {'EXIT', _} -> error(closed); + Data -> + {'HTTP/1.0', 200, _, Rest} = cow_http:parse_status_line(Data), + {Headers, _} = cow_http:parse_headers(Rest), + false = lists:keymember(<<"connection">>, 1, Headers) + end, + ok = raw_send(Client, Normal), + case catch raw_recv_head(Client) of + {'EXIT', _} -> closed; + _ -> error(not_closed) + end. + +http10_keepalive_forced(Config) -> + Keepalive = "GET / HTTP/1.0\r\nhost: localhost\r\nConnection: keep-alive\r\n\r\n", + Client = raw_open(Config), + ok = raw_send(Client, Keepalive), + case catch raw_recv_head(Client) of + {'EXIT', _} -> error(closed); + Data -> + {'HTTP/1.0', 200, _, Rest} = cow_http:parse_status_line(Data), + {Headers, _} = cow_http:parse_headers(Rest), + {_, <<"keep-alive">>} = lists:keyfind(<<"connection">>, 1, Headers) + end, + ok = raw_send(Client, Keepalive), + case catch raw_recv_head(Client) of + {'EXIT', Err} -> error({closed, Err}); + _ -> ok + end. 
+ +keepalive_max(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Refs = [gun:get(ConnPid, "/", [{<<"connection">>, <<"keep-alive">>}]) + || _ <- lists:seq(1, 49)], + CloseRef = gun:get(ConnPid, "/", [{<<"connection">>, <<"keep-alive">>}]), + _ = [begin + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref, MRef), + false = lists:keymember(<<"connection">>, 1, Headers) + end || Ref <- Refs], + {response, nofin, 200, Headers} = gun:await(ConnPid, CloseRef, MRef), + {_, <<"close">>} = lists:keyfind(<<"connection">>, 1, Headers), + gun_is_gone(ConnPid, MRef). + +keepalive_nl(Config) -> + ConnPid = gun_open(Config), + Refs = [begin + Ref = gun:get(ConnPid, "/", [{<<"connection">>, <<"keep-alive">>}]), + gun:dbg_send_raw(ConnPid, <<"\r\n">>), + Ref + end || _ <- lists:seq(1, 10)], + _ = [begin + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + false = lists:keymember(<<"connection">>, 1, Headers) + end || Ref <- Refs], + ok. + +keepalive_stream_loop(Config) -> + ConnPid = gun_open(Config), + Refs = [begin + Ref = gun:post(ConnPid, "/loop_stream_recv", + [{<<"transfer-encoding">>, <<"chunked">>}]), + _ = [gun:data(ConnPid, Ref, nofin, << ID:32 >>) + || ID <- lists:seq(1, 250)], + gun:data(ConnPid, Ref, fin, <<>>), + Ref + end || _ <- lists:seq(1, 10)], + _ = [begin + {response, fin, 200, _} = gun:await(ConnPid, Ref) + end || Ref <- Refs], + ok. + +multipart(Config) -> + ConnPid = gun_open(Config), + Body = << + "This is a preamble." + "\r\n--OHai\r\nX-Name:answer\r\n\r\n42" + "\r\n--OHai\r\nServer:Cowboy\r\n\r\nIt rocks!\r\n" + "\r\n--OHai--\r\n" + "This is an epilogue." 
+ >>, + Ref = gun:post(ConnPid, "/multipart", + [{<<"content-type">>, <<"multipart/x-makes-no-sense; boundary=OHai">>}], + Body), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, RespBody} = gun:await_body(ConnPid, Ref), + Parts = binary_to_term(RespBody), + Parts = [ + {[{<<"x-name">>, <<"answer">>}], <<"42">>}, + {[{<<"server">>, <<"Cowboy">>}], <<"It rocks!\r\n">>} + ], + ok. + +multipart_chunked(Config) -> + ConnPid = gun_open(Config), + Body = << + "This is a preamble." + "\r\n--OHai\r\nX-Name:answer\r\n\r\n42" + "\r\n--OHai\r\nServer:Cowboy\r\n\r\nIt rocks!\r\n" + "\r\n--OHai--\r\n" + "This is an epilogue." + >>, + Ref = gun:post(ConnPid, "/multipart", [ + {<<"content-type">>, <<"multipart/x-makes-no-sense; boundary=OHai">>}, + {<<"transfer-encoding">>, <<"chunked">>}]), + gun:data(ConnPid, Ref, fin, Body), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, RespBody} = gun:await_body(ConnPid, Ref), + Parts = binary_to_term(RespBody), + Parts = [ + {[{<<"x-name">>, <<"answer">>}], <<"42">>}, + {[{<<"server">>, <<"Cowboy">>}], <<"It rocks!\r\n">>} + ], + ok. + +multipart_large(Config) -> + ConnPid = gun_open(Config), + Boundary = "----------", + Big = << 0:9000000/unit:8 >>, + Bigger = << 0:9999999/unit:8 >>, + Body = ["--", Boundary, "\r\ncontent-length: 9000000\r\n\r\n", Big, "\r\n", + "--", Boundary, "\r\ncontent-length: 9999999\r\n\r\n", Bigger, "\r\n", + "--", Boundary, "--\r\n"], + Ref = gun:post(ConnPid, "/multipart/large", + [{<<"content-type">>, ["multipart/x-large; boundary=", Boundary]}], + Body), + {response, fin, 200, _} = gun:await(ConnPid, Ref), + ok. + +do_nc(Config, Input) -> + Cat = os:find_executable("cat"), + Nc = os:find_executable("nc"), + case {Cat, Nc} of + {false, _} -> + {skip, {notfound, cat}}; + {_, false} -> + {skip, {notfound, nc}}; + _Good -> + %% Throw garbage at the server then check if it's still up. 
+ StrPort = integer_to_list(config(port, Config)), + [os:cmd("cat " ++ Input ++ " | nc localhost " ++ StrPort) + || _ <- lists:seq(1, 100)], + 200 = do_get("/", Config) + end. + +nc_rand(Config) -> + do_nc(Config, "/dev/urandom"). + +nc_zero(Config) -> + do_nc(Config, "/dev/zero"). + +onrequest(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {<<"server">>, <<"Serenity">>} = lists:keyfind(<<"server">>, 1, Headers), + {ok, <<"http_handler">>} = gun:await_body(ConnPid, Ref), + ok. + +onrequest_reply(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/?reply=1"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {<<"server">>, <<"Cowboy">>} = lists:keyfind(<<"server">>, 1, Headers), + {ok, <<"replied!">>} = gun:await_body(ConnPid, Ref), + ok. + +%% Hook for the above onrequest tests. +do_onrequest_hook(Req) -> + case cowboy_req:qs_val(<<"reply">>, Req) of + {undefined, Req2} -> + cowboy_req:set_resp_header(<<"server">>, <<"Serenity">>, Req2); + {_, Req2} -> + {ok, Req3} = cowboy_req:reply( + 200, [], <<"replied!">>, Req2), + Req3 + end. + +onresponse_capitalize(Config) -> + Client = raw_open(Config), + ok = raw_send(Client, "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n"), + Data = raw_recv_head(Client), + false = nomatch =:= binary:match(Data, <<"Content-Length">>), + ok. + +%% Hook for the above onresponse_capitalize test. +do_onresponse_capitalize_hook(Status, Headers, Body, Req) -> + Headers2 = [{cowboy_bstr:capitalize_token(N), V} + || {N, V} <- Headers], + {ok, Req2} = cowboy_req:reply(Status, Headers2, Body, Req), + Req2. + +onresponse_crash(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/handler_errors?case=init_before_reply"), + {response, fin, 777, Headers} = gun:await(ConnPid, Ref), + {<<"x-hook">>, <<"onresponse">>} = lists:keyfind(<<"x-hook">>, 1, Headers). 
+ +onresponse_reply(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/"), + {response, nofin, 777, Headers} = gun:await(ConnPid, Ref), + {<<"x-hook">>, <<"onresponse">>} = lists:keyfind(<<"x-hook">>, 1, Headers), + ok. + +%% Hook for the above onresponse tests. +do_onresponse_hook(_, Headers, _, Req) -> + {ok, Req2} = cowboy_req:reply( + <<"777 Lucky">>, [{<<"x-hook">>, <<"onresponse">>}|Headers], Req), + Req2. + +parse_host(Config) -> + ConnPid = gun_open(Config), + Tests = [ + {<<"example.org:8080">>, <<"example.org\n8080">>}, + {<<"example.org">>, <<"example.org\n80">>}, + {<<"192.0.2.1:8080">>, <<"192.0.2.1\n8080">>}, + {<<"192.0.2.1">>, <<"192.0.2.1\n80">>}, + {<<"[2001:db8::1]:8080">>, <<"[2001:db8::1]\n8080">>}, + {<<"[2001:db8::1]">>, <<"[2001:db8::1]\n80">>}, + {<<"[::ffff:192.0.2.1]:8080">>, <<"[::ffff:192.0.2.1]\n8080">>}, + {<<"[::ffff:192.0.2.1]">>, <<"[::ffff:192.0.2.1]\n80">>} + ], + [begin + Ref = gun:get(ConnPid, "/req_attr?attr=host_and_port", + [{<<"host">>, Host}]), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, Body} = gun:await_body(ConnPid, Ref) + end || {Host, Body} <- Tests], + ok. + +pipeline(Config) -> + ConnPid = gun_open(Config), + Refs = [gun:get(ConnPid, "/") || _ <- lists:seq(1, 5)], + _ = [{response, nofin, 200, _} = gun:await(ConnPid, Ref) || Ref <- Refs], + ok. + +rest_param_all(Config) -> + ConnPid = gun_open(Config), + %% Accept without param. + Ref1 = gun:get(ConnPid, "/param_all", + [{<<"accept">>, <<"text/plain">>}]), + {response, nofin, 200, _} = gun:await(ConnPid, Ref1), + {ok, <<"[]">>} = gun:await_body(ConnPid, Ref1), + %% Accept with param. + Ref2 = gun:get(ConnPid, "/param_all", + [{<<"accept">>, <<"text/plain;level=1">>}]), + {response, nofin, 200, _} = gun:await(ConnPid, Ref2), + {ok, <<"level=1">>} = gun:await_body(ConnPid, Ref2), + %% Accept with param and quality. 
+ Ref3 = gun:get(ConnPid, "/param_all", + [{<<"accept">>, <<"text/plain;level=1;q=0.8, text/plain;level=2;q=0.5">>}]), + {response, nofin, 200, _} = gun:await(ConnPid, Ref3), + {ok, <<"level=1">>} = gun:await_body(ConnPid, Ref3), + Ref4 = gun:get(ConnPid, "/param_all", + [{<<"accept">>, <<"text/plain;level=1;q=0.5, text/plain;level=2;q=0.8">>}]), + {response, nofin, 200, _} = gun:await(ConnPid, Ref4), + {ok, <<"level=2">>} = gun:await_body(ConnPid, Ref4), + %% Without Accept. + Ref5 = gun:get(ConnPid, "/param_all"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref5), + {ok, <<"'*'">>} = gun:await_body(ConnPid, Ref5), + %% Content-Type without param. + Ref6 = gun:put(ConnPid, "/param_all", + [{<<"content-type">>, <<"text/plain">>}]), + {response, fin, 204, _} = gun:await(ConnPid, Ref6), + %% Content-Type with param. + Ref7 = gun:put(ConnPid, "/param_all", + [{<<"content-type">>, <<"text/plain; charset=utf-8">>}]), + {response, fin, 204, _} = gun:await(ConnPid, Ref7), + ok. + +rest_bad_accept(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/bad_accept", + [{<<"accept">>, <<"1">>}]), + {response, fin, 400, _} = gun:await(ConnPid, Ref), + ok. + +rest_bad_content_type(Config) -> + ConnPid = gun_open(Config), + Ref = gun:patch(ConnPid, "/bad_content_type", + [{<<"content-type">>, <<"text/plain, text/html">>}], <<"Whatever">>), + {response, fin, 415, _} = gun:await(ConnPid, Ref), + ok. + +rest_expires(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/rest_expires"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, Expires} = lists:keyfind(<<"expires">>, 1, Headers), + {_, LastModified} = lists:keyfind(<<"last-modified">>, 1, Headers), + Expires = LastModified = <<"Fri, 21 Sep 2012 22:36:14 GMT">>, + ok. 
+ +rest_expires_binary(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/rest_expires_binary"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"0">>} = lists:keyfind(<<"expires">>, 1, Headers), + ok. + +rest_keepalive(Config) -> + ConnPid = gun_open(Config), + Refs = [gun:get(ConnPid, "/simple") || _ <- lists:seq(1, 10)], + _ = [begin + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + false = lists:keymember(<<"connection">>, 1, Headers) + end || Ref <- Refs], + ok. + +rest_keepalive_post(Config) -> + ConnPid = gun_open(Config), + Refs = [{ + gun:post(ConnPid, "/forbidden_post", + [{<<"content-type">>, <<"text/plain">>}]), + gun:post(ConnPid, "/simple_post", + [{<<"content-type">>, <<"text/plain">>}]) + } || _ <- lists:seq(1, 5)], + _ = [begin + {response, fin, 403, Headers1} = gun:await(ConnPid, Ref1), + false = lists:keymember(<<"connection">>, 1, Headers1), + {response, fin, 303, Headers2} = gun:await(ConnPid, Ref2), + false = lists:keymember(<<"connection">>, 1, Headers2) + end || {Ref1, Ref2} <- Refs], + ok. + +rest_missing_get_callbacks(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/missing_get_callbacks"), + {response, fin, 500, _} = gun:await(ConnPid, Ref), + ok. + +rest_missing_put_callbacks(Config) -> + ConnPid = gun_open(Config), + Ref = gun:put(ConnPid, "/missing_put_callbacks", + [{<<"content-type">>, <<"application/json">>}], <<"{}">>), + {response, fin, 500, _} = gun:await(ConnPid, Ref), + ok. + +rest_nodelete(Config) -> + ConnPid = gun_open(Config), + Ref = gun:delete(ConnPid, "/nodelete"), + {response, fin, 500, _} = gun:await(ConnPid, Ref), + ok. + +rest_options_default(Config) -> + ConnPid = gun_open(Config), + Ref = gun:options(ConnPid, "/rest_empty_resource"), + {response, fin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"HEAD, GET, OPTIONS">>} = lists:keyfind(<<"allow">>, 1, Headers), + ok. 
+ +rest_patch(Config) -> + Tests = [ + {204, [{<<"content-type">>, <<"text/plain">>}], <<"whatever">>}, + {400, [{<<"content-type">>, <<"text/plain">>}], <<"false">>}, + {400, [{<<"content-type">>, <<"text/plain">>}], <<"halt">>}, + {415, [{<<"content-type">>, <<"application/json">>}], <<"bad_content_type">>} + ], + ConnPid = gun_open(Config), + _ = [begin + Ref = gun:patch(ConnPid, "/patch", Headers, Body), + {response, fin, Status, _} = gun:await(ConnPid, Ref) + end || {Status, Headers, Body} <- Tests], + ok. + +rest_post_charset(Config) -> + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/post_charset", + [{<<"content-type">>, <<"text/plain;charset=UTF-8">>}], "12345"), + {response, fin, 204, _} = gun:await(ConnPid, Ref), + ok. + +rest_postonly(Config) -> + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/postonly", + [{<<"content-type">>, <<"text/plain">>}], "12345"), + {response, fin, 204, _} = gun:await(ConnPid, Ref), + ok. + +rest_resource_get_etag(Config, Type) -> + rest_resource_get_etag(Config, Type, []). + +rest_resource_get_etag(Config, Type, Headers) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/resetags?type=" ++ Type, Headers), + {response, _, Status, RespHeaders} = gun:await(ConnPid, Ref), + case lists:keyfind(<<"etag">>, 1, RespHeaders) of + false -> {Status, false}; + {<<"etag">>, ETag} -> {Status, ETag} + end. + +rest_resource_etags(Config) -> + Tests = [ + {200, <<"W/\"etag-header-value\"">>, "tuple-weak"}, + {200, <<"\"etag-header-value\"">>, "tuple-strong"}, + {200, <<"W/\"etag-header-value\"">>, "binary-weak-quoted"}, + {200, <<"\"etag-header-value\"">>, "binary-strong-quoted"}, + {500, false, "binary-strong-unquoted"}, + {500, false, "binary-weak-unquoted"} + ], + _ = [{Status, ETag, Type} = begin + {Ret, RespETag} = rest_resource_get_etag(Config, Type), + {Ret, RespETag, Type} + end || {Status, ETag, Type} <- Tests]. 
+ +rest_resource_etags_if_none_match(Config) -> + Tests = [ + {304, <<"W/\"etag-header-value\"">>, "tuple-weak"}, + {304, <<"\"etag-header-value\"">>, "tuple-strong"}, + {304, <<"W/\"etag-header-value\"">>, "binary-weak-quoted"}, + {304, <<"\"etag-header-value\"">>, "binary-strong-quoted"} + ], + _ = [{Status, Type} = begin + {Ret, _} = rest_resource_get_etag(Config, Type, + [{<<"if-none-match">>, ETag}]), + {Ret, Type} + end || {Status, ETag, Type} <- Tests]. + +set_env_dispatch(Config) -> + ConnPid1 = gun_open(Config), + Ref1 = gun:get(ConnPid1, "/"), + {response, fin, 400, _} = gun:await(ConnPid1, Ref1), + ok = cowboy:set_env(set_env, dispatch, + cowboy_router:compile([{'_', [{"/", http_handler, []}]}])), + ConnPid2 = gun_open(Config), + Ref2 = gun:get(ConnPid2, "/"), + {response, nofin, 200, _} = gun:await(ConnPid2, Ref2), + ok. + +set_resp_body(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/set_resp/body"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, <<"A flameless dance does not equal a cycle">>} + = gun:await_body(ConnPid, Ref), + ok. + +set_resp_header(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/set_resp/header"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"Accept">>} = lists:keyfind(<<"vary">>, 1, Headers), + {_, _} = lists:keyfind(<<"set-cookie">>, 1, Headers), + ok. + +set_resp_overwrite(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/set_resp/overwrite"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"DesireDrive/1.0">>} = lists:keyfind(<<"server">>, 1, Headers), + ok. 
+ +slowloris(Config) -> + Client = raw_open(Config), + try + [begin + ok = raw_send(Client, [C]), + receive after 25 -> ok end + end || C <- "GET / HTTP/1.1\r\nHost: localhost\r\n" + "User-Agent: Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US)\r\n" + "Cookie: name=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\r\n\r\n"], + error(failure) + catch error:{badmatch, _} -> + ok + end. + +slowloris2(Config) -> + Client = raw_open(Config), + ok = raw_send(Client, "GET / HTTP/1.1\r\n"), + receive after 300 -> ok end, + ok = raw_send(Client, "Host: localhost\r\n"), + receive after 300 -> ok end, + Data = raw_recv_head(Client), + {_, 408, _, _} = cow_http:parse_status_line(Data), + ok. + +static_attribute_etag(Config) -> + ConnPid = gun_open(Config), + Ref1 = gun:get(ConnPid, "/static_attribute_etag/index.html"), + Ref2 = gun:get(ConnPid, "/static_attribute_etag/index.html"), + {response, nofin, 200, Headers1} = gun:await(ConnPid, Ref1), + {response, nofin, 200, Headers2} = gun:await(ConnPid, Ref2), + {_, ETag} = lists:keyfind(<<"etag">>, 1, Headers1), + {_, ETag} = lists:keyfind(<<"etag">>, 1, Headers2), + true = ETag =/= undefined, + ok. + +static_function_etag(Config) -> + ConnPid = gun_open(Config), + Ref1 = gun:get(ConnPid, "/static_function_etag/index.html"), + Ref2 = gun:get(ConnPid, "/static_function_etag/index.html"), + {response, nofin, 200, Headers1} = gun:await(ConnPid, Ref1), + {response, nofin, 200, Headers2} = gun:await(ConnPid, Ref2), + {_, ETag} = lists:keyfind(<<"etag">>, 1, Headers1), + {_, ETag} = lists:keyfind(<<"etag">>, 1, Headers2), + true = ETag =/= undefined, + ok. + +static_mimetypes_function(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/static_mimetypes_function/index.html"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"text/html">>} = lists:keyfind(<<"content-type">>, 1, Headers), + ok. 
+ +static_specify_file(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/static_specify_file"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"text/css">>} = lists:keyfind(<<"content-type">>, 1, Headers), + {ok, <<"body{color:red}\n">>} = gun:await_body(ConnPid, Ref), + ok. + +static_specify_file_catchall(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/static_specify_file/none"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"text/css">>} = lists:keyfind(<<"content-type">>, 1, Headers), + {ok, <<"body{color:red}\n">>} = gun:await_body(ConnPid, Ref), + ok. + +static_test_file(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/static/unknown"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"application/octet-stream">>} = lists:keyfind(<<"content-type">>, 1, Headers), + ok. + +static_test_file_css(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/static/style.css"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"text/css">>} = lists:keyfind(<<"content-type">>, 1, Headers), + ok. + +stream_body_set_resp(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/stream_body/set_resp"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, <<"stream_body_set_resp">>} = gun:await_body(ConnPid, Ref), + ok. + +stream_body_set_resp_close(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Ref = gun:get(ConnPid, "/stream_body/set_resp_close"), + {response, nofin, 200, _} = gun:await(ConnPid, Ref, MRef), + {ok, <<"stream_body_set_resp_close">>} = gun:await_body(ConnPid, Ref, MRef), + gun_is_gone(ConnPid, MRef). 
+ +stream_body_set_resp_chunked(Config) -> + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/stream_body/set_resp_chunked"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref), + {_, <<"chunked">>} = lists:keyfind(<<"transfer-encoding">>, 1, Headers), + {ok, <<"stream_body_set_resp_chunked">>} = gun:await_body(ConnPid, Ref), + ok. + +stream_body_set_resp_chunked10(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config, [{http, [{version, 'HTTP/1.0'}]}]), + Ref = gun:get(ConnPid, "/stream_body/set_resp_chunked"), + {response, nofin, 200, Headers} = gun:await(ConnPid, Ref, MRef), + false = lists:keyfind(<<"transfer-encoding">>, 1, Headers), + {ok, <<"stream_body_set_resp_chunked">>} = gun:await_body(ConnPid, Ref, MRef), + gun_is_gone(ConnPid, MRef). + +%% Undocumented hack: force chunked response to be streamed as HTTP/1.1. +streamed_response(Config) -> + Client = raw_open(Config), + ok = raw_send(Client, "GET /streamed_response HTTP/1.1\r\nHost: localhost\r\n\r\n"), + Data = raw_recv_head(Client), + {'HTTP/1.1', 200, _, Rest} = cow_http:parse_status_line(Data), + {Headers, Rest2} = cow_http:parse_headers(Rest), + false = lists:keymember(<<"transfer-encoding">>, 1, Headers), + Rest2Size = byte_size(Rest2), + ok = case <<"streamed_handler\r\nworks fine!">> of + Rest2 -> ok; + << Rest2:Rest2Size/binary, Expect/bits >> -> raw_expect_recv(Client, Expect) + end. + +te_chunked(Config) -> + Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body", + [{<<"transfer-encoding">>, <<"chunked">>}], Body), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, Body} = gun:await_body(ConnPid, Ref), + ok. 
+ +do_body_to_chunks(_, <<>>, Acc) -> + lists:reverse([<<"0\r\n\r\n">>|Acc]); +do_body_to_chunks(ChunkSize, Body, Acc) -> + BodySize = byte_size(Body), + ChunkSize2 = case BodySize < ChunkSize of + true -> BodySize; + false -> ChunkSize + end, + << Chunk:ChunkSize2/binary, Rest/binary >> = Body, + ChunkSizeBin = list_to_binary(integer_to_list(ChunkSize2, 16)), + do_body_to_chunks(ChunkSize, Rest, + [<< ChunkSizeBin/binary, "\r\n", Chunk/binary, "\r\n" >>|Acc]). + +te_chunked_chopped(Config) -> + Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])), + Body2 = iolist_to_binary(do_body_to_chunks(50, Body, [])), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body", + [{<<"transfer-encoding">>, <<"chunked">>}]), + _ = [begin + ok = gun:dbg_send_raw(ConnPid, << C >>), + receive after 10 -> ok end + end || << C >> <= Body2], + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, Body} = gun:await_body(ConnPid, Ref), + ok. + +te_chunked_delayed(Config) -> + Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])), + Chunks = do_body_to_chunks(50, Body, []), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body", + [{<<"transfer-encoding">>, <<"chunked">>}]), + _ = [begin + ok = gun:dbg_send_raw(ConnPid, Chunk), + receive after 10 -> ok end + end || Chunk <- Chunks], + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, Body} = gun:await_body(ConnPid, Ref), + ok. 
+ +te_chunked_split_body(Config) -> + Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])), + Chunks = do_body_to_chunks(50, Body, []), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body", + [{<<"transfer-encoding">>, <<"chunked">>}]), + _ = [begin + case Chunk of + <<"0\r\n\r\n">> -> + ok = gun:dbg_send_raw(ConnPid, Chunk); + _ -> + [Size, ChunkBody, <<>>] = + binary:split(Chunk, [<<"\r\n">>], [global]), + PartASize = random:uniform(byte_size(ChunkBody)), + << PartA:PartASize/binary, PartB/bits >> = ChunkBody, + ok = gun:dbg_send_raw(ConnPid, [Size, <<"\r\n">>, PartA]), + receive after 10 -> ok end, + ok = gun:dbg_send_raw(ConnPid, [PartB, <<"\r\n">>]) + end + end || Chunk <- Chunks], + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, Body} = gun:await_body(ConnPid, Ref), + ok. + +te_chunked_split_crlf(Config) -> + Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])), + Chunks = do_body_to_chunks(50, Body, []), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body", + [{<<"transfer-encoding">>, <<"chunked">>}]), + _ = [begin + %% Split in the newline just before the end of the chunk. + Len = byte_size(Chunk) - (random:uniform(2) - 1), + << Chunk2:Len/binary, End/binary >> = Chunk, + ok = gun:dbg_send_raw(ConnPid, Chunk2), + receive after 10 -> ok end, + ok = gun:dbg_send_raw(ConnPid, End) + end || Chunk <- Chunks], + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, Body} = gun:await_body(ConnPid, Ref), + ok. + +te_identity(Config) -> + Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/echo/body", [], Body), + {response, nofin, 200, _} = gun:await(ConnPid, Ref), + {ok, Body} = gun:await_body(ConnPid, Ref), + ok. 
diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_body_qs.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_body_qs.erl new file mode 100644 index 0000000..8a438e6 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_body_qs.erl @@ -0,0 +1,40 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_body_qs). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init({_, http}, Req, _) -> + {ok, Req, undefined}. + +handle(Req, State) -> + {Method, Req2} = cowboy_req:method(Req), + HasBody = cowboy_req:has_body(Req2), + {ok, Req3} = maybe_echo(Method, HasBody, Req2), + {ok, Req3, State}. + +maybe_echo(<<"POST">>, true, Req) -> + case cowboy_req:body_qs(Req) of + {badlength, Req2} -> + echo(badlength, Req2); + {ok, PostVals, Req2} -> + echo(proplists:get_value(<<"echo">>, PostVals), Req2) + end; + +maybe_echo(<<"POST">>, false, Req) -> + cowboy_req:reply(400, [], <<"Missing body.">>, Req); +maybe_echo(_, _, Req) -> + %% Method not allowed. + cowboy_req:reply(405, Req). + +echo(badlength, Req) -> + cowboy_req:reply(413, [], <<"POST body bigger than 16000 bytes">>, Req); +echo(undefined, Req) -> + cowboy_req:reply(400, [], <<"Missing echo parameter.">>, Req); +echo(Echo, Req) -> + cowboy_req:reply(200, [ + {<<"content-type">>, <<"text/plain; charset=utf-8">>} + ], Echo, Req). + +terminate(_, _, _) -> + ok. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_chunked.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_chunked.erl new file mode 100644 index 0000000..447c0f6 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_chunked.erl @@ -0,0 +1,19 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_chunked). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init({_Transport, http}, Req, _Opts) -> + {ok, Req, undefined}. 
+ +handle(Req, State) -> + {ok, Req2} = cowboy_req:chunked_reply(200, Req), + timer:sleep(100), + cowboy_req:chunk("chunked_handler\r\n", Req2), + timer:sleep(100), + cowboy_req:chunk("works fine!", Req2), + {ok, Req2, State}. + +terminate(_, _, _) -> + ok. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_echo_body.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_echo_body.erl new file mode 100644 index 0000000..3334b95 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_echo_body.erl @@ -0,0 +1,29 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_echo_body). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init({_, http}, Req, _) -> + {ok, Req, undefined}. + +handle(Req, State) -> + true = cowboy_req:has_body(Req), + {ok, Req3} = case cowboy_req:body(Req, [{length, 1000000}]) of + {ok, Body, Req2} -> handle_body(Req2, Body); + {more, _, Req2} -> handle_badlength(Req2) + end, + {ok, Req3, State}. + +handle_badlength(Req) -> + {ok, Req2} = cowboy_req:reply(413, [], <<"Request entity too large">>, Req), + {ok, Req2}. + +handle_body(Req, Body) -> + {Size, Req2} = cowboy_req:body_length(Req), + Size = byte_size(Body), + {ok, Req3} = cowboy_req:reply(200, [], Body, Req2), + {ok, Req3}. + +terminate(_, _, _) -> + ok. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_errors.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_errors.erl similarity index 56% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_errors.erl rename to rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_errors.erl index 1c23207..35ac3bd 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_errors.erl +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_errors.erl @@ -1,40 +1,38 @@ %% Feel free to use, reuse and abuse the code in this file. --module(http_handler_errors). 
+-module(http_errors). -behaviour(cowboy_http_handler). --export([init/3, handle/2, terminate/2]). +-export([init/3, handle/2, terminate/3]). init({_Transport, http}, Req, _Opts) -> - {Case, Req1} = cowboy_http_req:qs_val(<<"case">>, Req), + {Case, Req1} = cowboy_req:qs_val(<<"case">>, Req), case_init(Case, Req1). case_init(<<"init_before_reply">> = Case, _Req) -> + cowboy_error_h:ignore(?MODULE, case_init, 2), erlang:error(Case); - case_init(<<"init_after_reply">> = Case, Req) -> - {ok, _Req1} = cowboy_http_req:reply(200, [], "http_handler_crashes", Req), + cowboy_error_h:ignore(?MODULE, case_init, 2), + {ok, _Req1} = cowboy_req:reply(200, [], "http_handler_crashes", Req), erlang:error(Case); - case_init(<<"init_reply_handle_error">> = Case, Req) -> - {ok, Req1} = cowboy_http_req:reply(200, [], "http_handler_crashes", Req), + {ok, Req1} = cowboy_req:reply(200, [], "http_handler_crashes", Req), {ok, Req1, Case}; - case_init(<<"handle_before_reply">> = Case, Req) -> {ok, Req, Case}; - case_init(<<"handle_after_reply">> = Case, Req) -> {ok, Req, Case}. - handle(_Req, <<"init_reply_handle_error">> = Case) -> + cowboy_error_h:ignore(?MODULE, handle, 2), erlang:error(Case); - handle(_Req, <<"handle_before_reply">> = Case) -> + cowboy_error_h:ignore(?MODULE, handle, 2), erlang:error(Case); - handle(Req, <<"handle_after_reply">> = Case) -> - {ok, _Req1} = cowboy_http_req:reply(200, [], "http_handler_crashes", Req), + cowboy_error_h:ignore(?MODULE, handle, 2), + {ok, _Req1} = cowboy_req:reply(200, [], "http_handler_crashes", Req), erlang:error(Case). -terminate(_Req, _State) -> +terminate(_, _, _) -> ok. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_handler.erl similarity index 76% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler.erl rename to rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_handler.erl index 76a85d4..e1f1665 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler.erl +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_handler.erl @@ -2,7 +2,7 @@ -module(http_handler). -behaviour(cowboy_http_handler). --export([init/3, handle/2, terminate/2]). +-export([init/3, handle/2, terminate/3]). -record(state, {headers, body}). @@ -12,8 +12,8 @@ init({_Transport, http}, Req, Opts) -> {ok, Req, #state{headers=Headers, body=Body}}. handle(Req, State=#state{headers=Headers, body=Body}) -> - {ok, Req2} = cowboy_http_req:reply(200, Headers, Body, Req), + {ok, Req2} = cowboy_req:reply(200, Headers, Body, Req), {ok, Req2, State}. -terminate(_Req, _State) -> +terminate(_, _, _) -> ok. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_init_shutdown.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_init_shutdown.erl new file mode 100644 index 0000000..1445569 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_init_shutdown.erl @@ -0,0 +1,17 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_init_shutdown). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init({_Transport, http}, Req, _Opts) -> + {ok, Req2} = cowboy_req:reply(<<"666 Init Shutdown Testing">>, + [{<<"connection">>, <<"close">>}], Req), + {shutdown, Req2, undefined}. + +handle(Req, State) -> + {ok, Req2} = cowboy_req:reply(200, [], "Hello world!", Req), + {ok, Req2, State}. + +terminate(_, _, _) -> + ok. 
diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_loop_stream_recv.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_loop_stream_recv.erl new file mode 100644 index 0000000..ce0d1da --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_loop_stream_recv.erl @@ -0,0 +1,34 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_loop_stream_recv). +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +init({_, http}, Req, _) -> + receive after 100 -> ok end, + self() ! stream, + {loop, Req, undefined, 100}. + +info(stream, Req, undefined) -> + stream(Req, 1, <<>>). + +stream(Req, ID, Acc) -> + case cowboy_req:body(Req) of + {ok, <<>>, Req2} -> + {ok, Req3} = cowboy_req:reply(200, Req2), + {ok, Req3, undefined}; + {_, Data, Req2} -> + parse_id(Req2, ID, << Acc/binary, Data/binary >>) + end. + +parse_id(Req, ID, Data) -> + case Data of + << ID:32, Rest/bits >> -> + parse_id(Req, ID + 1, Rest); + _ -> + stream(Req, ID, Data) + end. + +terminate({normal, shutdown}, _, _) -> + ok. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_multipart.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_multipart.erl new file mode 100644 index 0000000..79bfeb8 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_multipart.erl @@ -0,0 +1,25 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_multipart). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init({_Transport, http}, Req, []) -> + {ok, Req, {}}. + +handle(Req, State) -> + {Result, Req2} = acc_multipart(Req, []), + {ok, Req3} = cowboy_req:reply(200, [], term_to_binary(Result), Req2), + {ok, Req3, State}. + +terminate(_, _, _) -> + ok. 
+ +acc_multipart(Req, Acc) -> + case cowboy_req:part(Req) of + {ok, Headers, Req2} -> + {ok, Body, Req3} = cowboy_req:part_body(Req2), + acc_multipart(Req3, [{Headers, Body}|Acc]); + {done, Req2} -> + {lists:reverse(Acc), Req2} + end. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_multipart_stream.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_multipart_stream.erl new file mode 100644 index 0000000..926d150 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_multipart_stream.erl @@ -0,0 +1,34 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_multipart_stream). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init(_, Req, []) -> + {ok, Req, undefined}. + +handle(Req, State) -> + Req2 = multipart(Req), + {ok, Req3} = cowboy_req:reply(200, Req2), + {ok, Req3, State}. + +terminate(_, _, _) -> + ok. + +multipart(Req) -> + case cowboy_req:part(Req) of + {ok, [{<<"content-length">>, BinLength}], Req2} -> + Length = list_to_integer(binary_to_list(BinLength)), + {Length, Req3} = stream_body(Req2, 0), + multipart(Req3); + {done, Req2} -> + Req2 + end. + +stream_body(Req, N) -> + case cowboy_req:part_body(Req) of + {ok, Data, Req2} -> + {N + byte_size(Data), Req2}; + {more, Data, Req2} -> + stream_body(Req2, N + byte_size(Data)) + end. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_req_attr.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_req_attr.erl new file mode 100644 index 0000000..eb5e70e --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_req_attr.erl @@ -0,0 +1,19 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_req_attr). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init({_, http}, Req, _) -> + {Attr, Req2} = cowboy_req:qs_val(<<"attr">>, Req), + {ok, Req2, Attr}. 
+ +handle(Req, <<"host_and_port">> = Attr) -> + {Host, Req2} = cowboy_req:host(Req), + {Port, Req3} = cowboy_req:port(Req2), + Value = [Host, "\n", integer_to_list(Port)], + {ok, Req4} = cowboy_req:reply(200, [], Value, Req3), + {ok, Req4, Attr}. + +terminate(_, _, _) -> + ok. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_set_resp.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_set_resp.erl new file mode 100644 index 0000000..821cc1d --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_set_resp.erl @@ -0,0 +1,31 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_set_resp). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init({_Transport, http}, Req, Opts) -> + Headers = proplists:get_value(headers, Opts, []), + Body = proplists:get_value(body, Opts, <<"http_handler_set_resp">>), + Req2 = lists:foldl(fun({Name, Value}, R) -> + cowboy_req:set_resp_header(Name, Value, R) + end, Req, Headers), + Req3 = cowboy_req:set_resp_body(Body, Req2), + Req4 = cowboy_req:set_resp_header(<<"x-cowboy-test">>, <<"ok">>, Req3), + Req5 = cowboy_req:set_resp_cookie(<<"cake">>, <<"lie">>, [], Req4), + {ok, Req5, undefined}. + +handle(Req, State) -> + case cowboy_req:has_resp_header(<<"x-cowboy-test">>, Req) of + false -> {ok, Req, State}; + true -> + case cowboy_req:has_resp_body(Req) of + false -> {ok, Req, State}; + true -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State} + end + end. + +terminate(_, _, _) -> + ok. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_stream_body.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_stream_body.erl new file mode 100644 index 0000000..d896797 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_stream_body.erl @@ -0,0 +1,32 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_stream_body). +-behaviour(cowboy_http_handler). 
+-export([init/3, handle/2, terminate/3]). + +-record(state, {headers, body, reply}). + +init({_Transport, http}, Req, Opts) -> + Headers = proplists:get_value(headers, Opts, []), + Body = proplists:get_value(body, Opts, "http_handler_stream_body"), + Reply = proplists:get_value(reply, Opts), + {ok, Req, #state{headers=Headers, body=Body, reply=Reply}}. + +handle(Req, State=#state{headers=_Headers, body=Body, reply=Reply}) -> + SFun = fun(Socket, Transport) -> Transport:send(Socket, Body) end, + Req2 = case Reply of + set_resp -> + SLen = iolist_size(Body), + cowboy_req:set_resp_body_fun(SLen, SFun, Req); + set_resp_close -> + cowboy_req:set_resp_body_fun(SFun, Req); + set_resp_chunked -> + %% Here Body should be a list of chunks, not a binary. + SFun2 = fun(SendFun) -> lists:foreach(SendFun, Body) end, + cowboy_req:set_resp_body_fun(chunked, SFun2, Req) + end, + {ok, Req3} = cowboy_req:reply(200, Req2), + {ok, Req3, State}. + +terminate(_, _, _) -> + ok. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_streamed.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_streamed.erl new file mode 100644 index 0000000..674cc40 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/http_streamed.erl @@ -0,0 +1,20 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(http_streamed). +-behaviour(cowboy_http_handler). +-export([init/3, handle/2, terminate/3]). + +init({_Transport, http}, Req, _Opts) -> + {ok, Req, undefined}. + +handle(Req, State) -> + Req2 = cowboy_req:set([{resp_state, waiting_stream}], Req), + {ok, Req3} = cowboy_req:chunked_reply(200, Req2), + timer:sleep(100), + cowboy_req:chunk("streamed_handler\r\n", Req3), + timer:sleep(100), + cowboy_req:chunk("works fine!", Req3), + {ok, Req3, State}. + +terminate(_, _, _) -> + ok. 
diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_empty_resource.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_empty_resource.erl new file mode 100644 index 0000000..7e7c00a --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_empty_resource.erl @@ -0,0 +1,5 @@ +-module(rest_empty_resource). +-export([init/3]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_expires.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_expires.erl new file mode 100644 index 0000000..4209041 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_expires.erl @@ -0,0 +1,22 @@ +-module(rest_expires). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_text_plain/2]). +-export([expires/2]). +-export([last_modified/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"plain">>, []}, get_text_plain}], Req, State}. + +get_text_plain(Req, State) -> + {<<"This is REST!">>, Req, State}. + +expires(Req, State) -> + {{{2012, 9, 21}, {22, 36, 14}}, Req, State}. + +last_modified(Req, State) -> + {{{2012, 9, 21}, {22, 36, 14}}, Req, State}. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_expires_binary.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_expires_binary.erl new file mode 100644 index 0000000..4cbd001 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_expires_binary.erl @@ -0,0 +1,18 @@ +-module(rest_expires_binary). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_text_plain/2]). +-export([expires/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"plain">>, []}, get_text_plain}], Req, State}. 
+ +get_text_plain(Req, State) -> + {<<"This is REST!">>, Req, State}. + +expires(Req, State) -> + {<<"0">>, Req, State}. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_forbidden_resource.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_forbidden_resource.erl similarity index 67% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_forbidden_resource.erl rename to rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_forbidden_resource.erl index 90dee84..920ba31 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_forbidden_resource.erl +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_forbidden_resource.erl @@ -1,16 +1,16 @@ -module(rest_forbidden_resource). -export([init/3, rest_init/2, allowed_methods/2, forbidden/2, content_types_provided/2, content_types_accepted/2, - post_is_create/2, create_path/2, to_text/2, from_text/2]). + to_text/2, from_text/2]). init(_Transport, _Req, _Opts) -> - {upgrade, protocol, cowboy_http_rest}. + {upgrade, protocol, cowboy_rest}. rest_init(Req, [Forbidden]) -> {ok, Req, Forbidden}. allowed_methods(Req, State) -> - {['GET', 'HEAD', 'POST'], Req, State}. + {[<<"GET">>, <<"HEAD">>, <<"POST">>], Req, State}. forbidden(Req, State=true) -> {true, Req, State}; @@ -23,18 +23,9 @@ content_types_provided(Req, State) -> content_types_accepted(Req, State) -> {[{{<<"text">>, <<"plain">>, []}, from_text}], Req, State}. -post_is_create(Req, State) -> - {true, Req, State}. - -create_path(Req, State) -> - {Path, Req2} = cowboy_http_req:raw_path(Req), - {Path, Req2, State}. - to_text(Req, State) -> {<<"This is REST!">>, Req, State}. from_text(Req, State) -> - {true, Req, State}. - - - + {Path, Req2} = cowboy_req:path(Req), + {{true, Path}, Req2, State}. 
diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_missing_callbacks.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_missing_callbacks.erl new file mode 100644 index 0000000..94bfbbd --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_missing_callbacks.erl @@ -0,0 +1,23 @@ +-module(rest_missing_callbacks). +-export([init/3]). +-export([allowed_methods/2]). +-export([content_types_accepted/2]). +-export([content_types_provided/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +allowed_methods(Req, State) -> + {[<<"GET">>, <<"PUT">>], Req, State}. + +content_types_accepted(Req, State) -> + cowboy_error_h:ignore(cowboy_rest, process_content_type, 3), + {[ + {<<"application/json">>, put_application_json} + ], Req, State}. + +content_types_provided(Req, State) -> + cowboy_error_h:ignore(cowboy_rest, set_resp_body, 2), + {[ + {<<"text/plain">>, get_text_plain} + ], Req, State}. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_nodelete_resource.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_nodelete_resource.erl new file mode 100644 index 0000000..9f9670c --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_nodelete_resource.erl @@ -0,0 +1,17 @@ +-module(rest_nodelete_resource). +-export([init/3, allowed_methods/2, content_types_provided/2, + get_text_plain/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +allowed_methods(Req, State) -> + {[<<"GET">>, <<"HEAD">>, <<"DELETE">>], Req, State}. + + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"plain">>, []}, get_text_plain}], Req, State}. + +get_text_plain(Req, State) -> + {<<"This is REST!">>, Req, State}. 
+ diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_param_all.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_param_all.erl new file mode 100644 index 0000000..09b8cd3 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_param_all.erl @@ -0,0 +1,36 @@ +-module(rest_param_all). + +-export([init/3]). +-export([allowed_methods/2]). +-export([content_types_provided/2]). +-export([get_text_plain/2]). +-export([content_types_accepted/2]). +-export([put_text_plain/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +allowed_methods(Req, State) -> + {[<<"GET">>, <<"PUT">>], Req, State}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"plain">>, '*'}, get_text_plain}], Req, State}. + +get_text_plain(Req, State) -> + {{_, _, Param}, Req2} = + cowboy_req:meta(media_type, Req, {{<<"text">>, <<"plain">>}, []}), + Body = if + Param == '*' -> + <<"'*'">>; + Param == [] -> + <<"[]">>; + Param /= [] -> + iolist_to_binary([[Key, $=, Value] || {Key, Value} <- Param]) + end, + {Body, Req2, State}. + +content_types_accepted(Req, State) -> + {[{{<<"text">>, <<"plain">>, '*'}, put_text_plain}], Req, State}. + +put_text_plain(Req, State) -> + {true, Req, State}. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_patch_resource.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_patch_resource.erl new file mode 100644 index 0000000..e265f6f --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_patch_resource.erl @@ -0,0 +1,34 @@ +-module(rest_patch_resource). +-export([init/3, allowed_methods/2, content_types_provided/2, get_text_plain/2, + content_types_accepted/2, patch_text_plain/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +allowed_methods(Req, State) -> + {[<<"HEAD">>, <<"GET">>, <<"PATCH">>], Req, State}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"plain">>, []}, get_text_plain}], Req, State}. 
+ +get_text_plain(Req, State) -> + {<<"This is REST!">>, Req, State}. + +content_types_accepted(Req, State) -> + case cowboy_req:method(Req) of + {<<"PATCH">>, Req0} -> + {[{{<<"text">>, <<"plain">>, []}, patch_text_plain}], Req0, State}; + {_, Req0} -> + {[], Req0, State} + end. + +patch_text_plain(Req, State) -> + case cowboy_req:body(Req) of + {ok, <<"halt">>, Req0} -> + {ok, Req1} = cowboy_req:reply(400, Req0), + {halt, Req1, State}; + {ok, <<"false">>, Req0} -> + {false, Req0, State}; + {ok, _Body, Req0} -> + {true, Req0, State} + end. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_post_charset_resource.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_post_charset_resource.erl new file mode 100644 index 0000000..9ccfa61 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_post_charset_resource.erl @@ -0,0 +1,15 @@ +-module(rest_post_charset_resource). +-export([init/3, allowed_methods/2, content_types_accepted/2, from_text/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +allowed_methods(Req, State) -> + {[<<"POST">>], Req, State}. + +content_types_accepted(Req, State) -> + {[{{<<"text">>, <<"plain">>, [{<<"charset">>, <<"utf-8">>}]}, + from_text}], Req, State}. + +from_text(Req, State) -> + {true, Req, State}. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_postonly_resource.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_postonly_resource.erl new file mode 100644 index 0000000..4f725c9 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_postonly_resource.erl @@ -0,0 +1,14 @@ +-module(rest_postonly_resource). +-export([init/3, allowed_methods/2, content_types_accepted/2, from_text/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +allowed_methods(Req, State) -> + {[<<"POST">>], Req, State}. + +content_types_accepted(Req, State) -> + {[{{<<"text">>, <<"plain">>, '*'}, from_text}], Req, State}. 
+ +from_text(Req, State) -> + {true, Req, State}. diff --git a/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_resource_etags.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_resource_etags.erl new file mode 100644 index 0000000..2652f57 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_resource_etags.erl @@ -0,0 +1,32 @@ +-module(rest_resource_etags). +-export([init/3, generate_etag/2, content_types_provided/2, get_text_plain/2]). + +init(_Transport, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +generate_etag(Req, State) -> + case cowboy_req:qs_val(<<"type">>, Req) of + %% Correct return values from generate_etag/2. + {<<"tuple-weak">>, Req2} -> + {{weak, <<"etag-header-value">>}, Req2, State}; + {<<"tuple-strong">>, Req2} -> + {{strong, <<"etag-header-value">>}, Req2, State}; + %% Backwards compatible return values from generate_etag/2. + {<<"binary-weak-quoted">>, Req2} -> + {<<"W/\"etag-header-value\"">>, Req2, State}; + {<<"binary-strong-quoted">>, Req2} -> + {<<"\"etag-header-value\"">>, Req2, State}; + %% Invalid return values from generate_etag/2. + {<<"binary-strong-unquoted">>, Req2} -> + cowboy_error_h:ignore(cowboy_http, quoted_string, 2), + {<<"etag-header-value">>, Req2, State}; + {<<"binary-weak-unquoted">>, Req2} -> + cowboy_error_h:ignore(cowboy_http, quoted_string, 2), + {<<"W/etag-header-value">>, Req2, State} + end. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"plain">>, []}, get_text_plain}], Req, State}. + +get_text_plain(Req, State) -> + {<<"This is REST!">>, Req, State}. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_simple_resource.erl b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_simple_resource.erl similarity index 88% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_simple_resource.erl rename to rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_simple_resource.erl index e2c573c..97145dd 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/rest_simple_resource.erl +++ b/rabbitmq-server/deps/cowboy/test/http_SUITE_data/rest_simple_resource.erl @@ -2,7 +2,7 @@ -export([init/3, content_types_provided/2, get_text_plain/2]). init(_Transport, _Req, _Opts) -> - {upgrade, protocol, cowboy_http_rest}. + {upgrade, protocol, cowboy_rest}. content_types_provided(Req, State) -> {[{{<<"text">>, <<"plain">>, []}, get_text_plain}], Req, State}. diff --git a/rabbitmq-server/deps/cowboy/test/loop_handler_SUITE.erl b/rabbitmq-server/deps/cowboy/test/loop_handler_SUITE.erl new file mode 100644 index 0000000..5f69490 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/loop_handler_SUITE.erl @@ -0,0 +1,87 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(loop_handler_SUITE). +-compile(export_all). 
+ +-import(cowboy_test, [config/2]). +-import(cowboy_test, [doc/1]). +-import(cowboy_test, [gun_open/1]). + +%% ct. + +all() -> + cowboy_test:common_all(). + +groups() -> + cowboy_test:common_groups(cowboy_test:all(?MODULE)). + +init_per_group(Name, Config) -> + cowboy_test:init_common_groups(Name, Config, ?MODULE). + +end_per_group(Name, _) -> + cowboy:stop_listener(Name). + +%% Dispatch configuration. + +init_dispatch(_) -> + cowboy_router:compile([{'_', [ + {"/long_polling", long_polling_h, []}, + {"/loop_body", loop_handler_body_h, []}, + {"/loop_timeout", loop_handler_timeout_h, []} + ]}]). + +%% Tests. + +long_polling(Config) -> + doc("Simple long-polling."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/long_polling"), + {response, fin, 102, _} = gun:await(ConnPid, Ref), + ok. + +long_polling_body(Config) -> + doc("Long-polling with a body that falls within the configurable limits."), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/long_polling", [], << 0:5000/unit:8 >>), + {response, fin, 102, _} = gun:await(ConnPid, Ref), + ok. + +long_polling_body_too_large(Config) -> + doc("Long-polling with a body that exceeds the configurable limits."), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/long_polling", [], << 0:100000/unit:8 >>), + {response, fin, 500, _} = gun:await(ConnPid, Ref), + ok. + +long_polling_pipeline(Config) -> + doc("Pipeline of long-polling calls."), + ConnPid = gun_open(Config), + Refs = [gun:get(ConnPid, "/long_polling") || _ <- lists:seq(1, 2)], + _ = [{response, fin, 102, _} = gun:await(ConnPid, Ref) || Ref <- Refs], + ok. + +loop_body(Config) -> + doc("Check that a loop handler can read the request body in info/3."), + ConnPid = gun_open(Config), + Ref = gun:post(ConnPid, "/loop_body", [], << 0:100000/unit:8 >>), + {response, fin, 200, _} = gun:await(ConnPid, Ref), + ok. 
+ +loop_timeout(Config) -> + doc("Ensure that the loop handler timeout results in a 204 response."), + ConnPid = gun_open(Config), + Ref = gun:get(ConnPid, "/loop_timeout"), + {response, fin, 204, _} = gun:await(ConnPid, Ref), + ok. diff --git a/rabbitmq-server/deps/cowboy/test/spdy_SUITE.erl b/rabbitmq-server/deps/cowboy/test/spdy_SUITE.erl new file mode 100644 index 0000000..b26e5f8 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/spdy_SUITE.erl @@ -0,0 +1,147 @@ +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(spdy_SUITE). +-compile(export_all). + +-import(cowboy_test, [config/2]). +-import(cowboy_test, [gun_monitor_open/1]). +-import(cowboy_test, [raw_open/1]). +-import(cowboy_test, [raw_send/2]). + +%% ct. + +all() -> + [{group, spdy}]. + +groups() -> + [{spdy, [], cowboy_test:all(?MODULE)}]. + +init_per_suite(Config) -> + case proplists:get_value(ssl_app, ssl:versions()) of + Version when Version < "5.2.1" -> + {skip, "No NPN support in SSL application."}; + _ -> + Dir = config(priv_dir, Config) ++ "/static", + ct_helper:create_static_dir(Dir), + [{static_dir, Dir}|Config] + end. + +end_per_suite(Config) -> + ct_helper:delete_static_dir(config(static_dir, Config)). 
+ +init_per_group(Name, Config) -> + cowboy_test:init_spdy(Name, [ + {env, [{dispatch, init_dispatch(Config)}]} + ], Config). + +end_per_group(Name, _) -> + cowboy:stop_listener(Name). + +%% Dispatch configuration. + +init_dispatch(Config) -> + cowboy_router:compile([ + {"localhost", [ + {"/static/[...]", cowboy_static, + {dir, config(static_dir, Config)}}, + {"/echo/body", http_echo_body, []}, + {"/chunked", http_chunked, []}, + {"/", http_handler, []} + ]} + ]). + +%% Convenience functions. + +do_get(ConnPid, MRef, Host, Path) -> + StreamRef = gun:get(ConnPid, Path, [{":host", Host}]), + {response, IsFin, Status, _} = gun:await(ConnPid, StreamRef, MRef), + {IsFin, Status}. + +%% Tests. + +check_status(Config) -> + Tests = [ + {200, nofin, "localhost", "/"}, + {200, nofin, "localhost", "/chunked"}, + {200, nofin, "localhost", "/static/style.css"}, + {400, fin, "bad-host", "/"}, + {400, fin, "localhost", "bad-path"}, + {404, fin, "localhost", "/this/path/does/not/exist"} + ], + {ConnPid, MRef} = gun_monitor_open(Config), + _ = [{Status, Fin, Host, Path} = begin + {IsFin, Ret} = do_get(ConnPid, MRef, Host, Path), + {Ret, IsFin, Host, Path} + end || {Status, Fin, Host, Path} <- Tests], + gun:close(ConnPid). + +echo_body(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + Body = << 0:800000 >>, + StreamRef = gun:post(ConnPid, "/echo/body", [ + {<<"content-type">>, "application/octet-stream"} + ], Body), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef, MRef), + {ok, Body} = gun:await_body(ConnPid, StreamRef, MRef), + gun:close(ConnPid). + +echo_body_multi(Config) -> + {ConnPid, MRef} = gun_monitor_open(Config), + BodyChunk = << 0:80000 >>, + StreamRef = gun:post(ConnPid, "/echo/body", [ + %% @todo I'm still unhappy with this. It shouldn't be required... 
+ {<<"content-length">>, integer_to_list(byte_size(BodyChunk) * 10)}, + {<<"content-type">>, "application/octet-stream"} + ]), + _ = [gun:data(ConnPid, StreamRef, nofin, BodyChunk) || _ <- lists:seq(1, 9)], + gun:data(ConnPid, StreamRef, fin, BodyChunk), + {response, nofin, 200, _} = gun:await(ConnPid, StreamRef, MRef), + {ok, << 0:800000 >>} = gun:await_body(ConnPid, StreamRef, MRef), + gun:close(ConnPid). + +two_frames_one_packet(Config) -> + {raw_client, Socket, Transport} = Client = raw_open([ + {opts, [{client_preferred_next_protocols, + {client, [<<"spdy/3">>], <<"spdy/3">>}}]} + |Config]), + Zdef = cow_spdy:deflate_init(), + Zinf = cow_spdy:inflate_init(), + ok = raw_send(Client, iolist_to_binary([ + cow_spdy:syn_stream(Zdef, 1, 0, true, false, + 0, <<"GET">>, <<"https">>, <<"localhost">>, + <<"/">>, <<"HTTP/1.1">>, []), + cow_spdy:syn_stream(Zdef, 3, 0, true, false, + 0, <<"GET">>, <<"https">>, <<"localhost">>, + <<"/">>, <<"HTTP/1.1">>, []) + ])), + {Frame1, Rest1} = spdy_recv(Socket, Transport, <<>>), + {syn_reply, _, false, <<"200 OK">>, _, _} = cow_spdy:parse(Frame1, Zinf), + {Frame2, Rest2} = spdy_recv(Socket, Transport, Rest1), + {data, 1, true, _} = cow_spdy:parse(Frame2, Zinf), + {Frame3, Rest3} = spdy_recv(Socket, Transport, Rest2), + {syn_reply, _, false, <<"200 OK">>, _, _} = cow_spdy:parse(Frame3, Zinf), + {Frame4, <<>>} = spdy_recv(Socket, Transport, Rest3), + {data, 3, true, _} = cow_spdy:parse(Frame4, Zinf), + ok. + +spdy_recv(Socket, Transport, Acc) -> + {ok, Data} = Transport:recv(Socket, 0, 5000), + Data2 = << Acc/binary, Data/bits >>, + case cow_spdy:split(Data2) of + false -> + spdy_recv(Socket, Transport, Data2); + {true, Frame, Rest} -> + {Frame, Rest} + end. 
diff --git a/rabbitmq-server/deps/cowboy/test/ws_SUITE.erl b/rabbitmq-server/deps/cowboy/test/ws_SUITE.erl new file mode 100644 index 0000000..77c82f6 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE.erl @@ -0,0 +1,710 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(ws_SUITE). +-compile(export_all). + +-import(cowboy_test, [config/2]). + +%% ct. + +all() -> + [{group, autobahn}, {group, ws}]. + +groups() -> + BaseTests = cowboy_test:all(?MODULE) -- [autobahn_fuzzingclient], + [{autobahn, [], [autobahn_fuzzingclient]}, {ws, [parallel], BaseTests}]. + +init_per_suite(Config) -> + Config. + +init_per_group(Name = autobahn, Config) -> + %% Some systems have it named pip2. 
+ Out = os:cmd("pip show autobahntestsuite ; pip2 show autobahntestsuite"), + case string:str(Out, "autobahntestsuite") of + 0 -> + ct:print("Skipping the autobahn group because the " + "Autobahn Test Suite is not installed.~nTo install it, " + "please follow the instructions on this page:~n~n " + "http://autobahn.ws/testsuite/installation.html"), + {skip, "Autobahn Test Suite not installed."}; + _ -> + {ok, _} = cowboy:start_http(Name, 100, [{port, 33080}], [ + {env, [{dispatch, init_dispatch()}]}]), + Config + end; +init_per_group(Name = ws, Config) -> + cowboy_test:init_http(Name, [ + {env, [{dispatch, init_dispatch()}]}, + {compress, true} + ], Config). + +end_per_group(Listener, _Config) -> + cowboy:stop_listener(Listener). + +%% Dispatch configuration. + +init_dispatch() -> + cowboy_router:compile([ + {"localhost", [ + {"/ws_echo", ws_echo, []}, + {"/ws_echo_timer", ws_echo_timer, []}, + {"/ws_init_shutdown", ws_init_shutdown, []}, + {"/ws_send_many", ws_send_many, [ + {sequence, [ + {text, <<"one">>}, + {text, <<"two">>}, + {text, <<"seven!">>}]} + ]}, + {"/ws_send_close", ws_send_many, [ + {sequence, [ + {text, <<"send">>}, + close, + {text, <<"won't be received">>}]} + ]}, + {"/ws_send_close_payload", ws_send_many, [ + {sequence, [ + {text, <<"send">>}, + {close, 1001, <<"some text!">>}, + {text, <<"won't be received">>}]} + ]}, + {"/ws_timeout_hibernate", ws_timeout_hibernate, []}, + {"/ws_timeout_cancel", ws_timeout_cancel, []}, + {"/ws_upgrade_with_opts", ws_upgrade_with_opts, + <<"failure">>} + ]} + ]). + +%% Tests. + +autobahn_fuzzingclient(Config) -> + Out = os:cmd("cd " ++ config(priv_dir, Config) + ++ " && wstest -m fuzzingclient -s " + ++ config(data_dir, Config) ++ "client.json"), + Report = config(priv_dir, Config) ++ "reports/servers/index.html", + ct:log("

Full report

~n", [Report]), + ct:print("Autobahn Test Suite report: file://~s~n", [Report]), + ct:log("~s~n", [Out]), + {ok, HTML} = file:read_file(Report), + case length(binary:matches(HTML, <<"case_failed">>)) > 2 of + true -> error(failed); + false -> ok + end. + +%% We do not support hixie76 anymore. +ws0(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, + "GET /ws_echo_timer HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: WebSocket\r\n" + "Origin: http://localhost\r\n" + "Sec-Websocket-Key1: Y\" 4 1Lj!957b8@0H756!i\r\n" + "Sec-Websocket-Key2: 1711 M;4\\74 80<6\r\n" + "\r\n"), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 400, _}, _} + = erlang:decode_packet(http, Handshake, []). + +ws8(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_echo_timer HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + ok = gen_tcp:send(Socket, << 16#81, 16#85, 16#37, 16#fa, 16#21, 16#3d, + 16#7f, 16#9f, 16#4d, 16#51, 16#58 >>), + {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>} + 
= gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = gen_tcp:recv(Socket, 0, 6000), + ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 1:1, 0:7, 0:32 >>), %% ping + {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + +ws8_init_shutdown(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_init_shutdown HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 403, "Forbidden"}, _Rest} + = erlang:decode_packet(http, Handshake, []), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. 
+ +ws8_single_bytes(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_echo_timer HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + ok = gen_tcp:send(Socket, << 16#81 >>), %% send one byte + ok = timer:sleep(100), %% sleep for a period + ok = gen_tcp:send(Socket, << 16#85 >>), %% send another and so on + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#37 >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#fa >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#21 >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#3d >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#7f >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#9f >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#4d >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#51 >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, << 16#58 >>), + {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = 
gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = gen_tcp:recv(Socket, 0, 6000), + ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 1:1, 0:7, 0:32 >>), %% ping + {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + +ws13(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_echo_timer HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 13\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "Upgrade: websocket\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + %% text + ok = gen_tcp:send(Socket, << 16#81, 16#85, 16#37, 16#fa, 16#21, 16#3d, + 16#7f, 16#9f, 16#4d, 16#51, 16#58 >>), + {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>} + = gen_tcp:recv(Socket, 0, 6000), + %% binary (empty) + ok = gen_tcp:send(Socket, << 1:1, 0:3, 2:4, 1:1, 0:7, 0:32 >>), + {ok, << 1:1, 0:3, 2:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + %% binary + ok = gen_tcp:send(Socket, << 16#82, 16#85, 16#37, 16#fa, 16#21, 16#3d, + 16#7f, 16#9f, 16#4d, 16#51, 16#58 >>), + {ok, 
<< 1:1, 0:3, 2:4, 0:1, 5:7, "Hello" >>} + = gen_tcp:recv(Socket, 0, 6000), + %% Receives. + {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = gen_tcp:recv(Socket, 0, 6000), + {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} + = gen_tcp:recv(Socket, 0, 6000), + ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 1:1, 0:7, 0:32 >>), %% ping + {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + +ws_deflate(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}, {nodelay, true}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_echo HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "Sec-WebSocket-Extensions: x-webkit-deflate-frame\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + {"sec-websocket-extensions", "x-webkit-deflate-frame"} + = lists:keyfind("sec-websocket-extensions", 1, Headers), + + Mask = 16#11223344, + Hello = << 242, 
72, 205, 201, 201, 7, 0 >>, + MaskedHello = do_mask(Hello, Mask, <<>>), + + % send compressed text frame containing the Hello string + ok = gen_tcp:send(Socket, << 1:1, 1:1, 0:2, 1:4, 1:1, 7:7, Mask:32, + MaskedHello/binary >>), + % receive compressed text frame containing the Hello string + {ok, << 1:1, 1:1, 0:2, 1:4, 0:1, 7:7, Hello/binary >>} + = gen_tcp:recv(Socket, 0, 6000), + + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + +ws_deflate_chunks(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}, {nodelay, true}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_echo HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "Sec-WebSocket-Extensions: x-webkit-deflate-frame\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + {"sec-websocket-extensions", "x-webkit-deflate-frame"} + = lists:keyfind("sec-websocket-extensions", 1, Headers), + + Mask = 16#11223344, + Hello = << 242, 72, 205, 201, 201, 7, 0 >>, + MaskedHello = do_mask(Hello, Mask, <<>>), + + % send compressed text frame containing the Hello string + ok = gen_tcp:send(Socket, << 1:1, 1:1, 0:2, 1:4, 1:1, 7:7, Mask:32, + 
(binary:part(MaskedHello, 0, 4))/binary >>), + ok = timer:sleep(100), + ok = gen_tcp:send(Socket, binary:part(MaskedHello, 4, 3)), + + % receive compressed text frame containing the Hello string + {ok, << 1:1, 1:1, 0:2, 1:4, 0:1, 7:7, Hello/binary >>} + = gen_tcp:recv(Socket, 0, 6000), + + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + +ws_deflate_fragments(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}, {nodelay, true}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_echo HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "Sec-WebSocket-Extensions: x-webkit-deflate-frame\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + {"sec-websocket-extensions", "x-webkit-deflate-frame"} + = lists:keyfind("sec-websocket-extensions", 1, Headers), + + Mask = 16#11223344, + Hello = << 242, 72, 205, 201, 201, 7, 0 >>, + + % send compressed text frame containing the Hello string + % as 2 separate fragments + ok = gen_tcp:send(Socket, << 0:1, 1:1, 0:2, 1:4, 1:1, 4:7, Mask:32, + (do_mask(binary:part(Hello, 0, 4), Mask, <<>>))/binary >>), + ok = gen_tcp:send(Socket, << 1:1, 1:1, 0:2, 0:4, 1:1, 3:7, 
Mask:32, + (do_mask(binary:part(Hello, 4, 3), Mask, <<>>))/binary >>), + % receive compressed text frame containing the Hello string + {ok, << 1:1, 1:1, 0:2, 1:4, 0:1, 7:7, Hello/binary >>} + = gen_tcp:recv(Socket, 0, 6000), + + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + +ws_send_close(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_send_close HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + %% We catch all frames at once and check them directly. + {ok, Many} = gen_tcp:recv(Socket, 8, 6000), + << 1:1, 0:3, 1:4, 0:1, 4:7, "send", + 1:1, 0:3, 8:4, 0:8 >> = Many, + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. 
+ +ws_send_close_payload(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_send_close_payload HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + %% We catch all frames at once and check them directly. + {ok, Many} = gen_tcp:recv(Socket, 20, 6000), + << 1:1, 0:3, 1:4, 0:1, 4:7, "send", + 1:1, 0:3, 8:4, 0:1, 12:7, 1001:16, "some text!" >> = Many, + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. 
+ +ws_send_many(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_send_many HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + %% We catch all frames at once and check them directly. + {ok, Many} = gen_tcp:recv(Socket, 18, 6000), + << 1:1, 0:3, 1:4, 0:1, 3:7, "one", + 1:1, 0:3, 1:4, 0:1, 3:7, "two", + 1:1, 0:3, 1:4, 0:1, 6:7, "seven!" >> = Many, + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. 
+ +ws_text_fragments(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_echo HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + + ok = gen_tcp:send(Socket, [ + << 0:1, 0:3, 1:4, 1:1, 5:7 >>, + << 16#37 >>, << 16#fa >>, << 16#21 >>, << 16#3d >>, << 16#7f >>, + << 16#9f >>, << 16#4d >>, << 16#51 >>, << 16#58 >>]), + ok = gen_tcp:send(Socket, [ + << 1:1, 0:3, 0:4, 1:1, 5:7 >>, + << 16#37 >>, << 16#fa >>, << 16#21 >>, << 16#3d >>, << 16#7f >>, + << 16#9f >>, << 16#4d >>, << 16#51 >>, << 16#58 >>]), + {ok, << 1:1, 0:3, 1:4, 0:1, 10:7, "HelloHello" >>} + = gen_tcp:recv(Socket, 0, 6000), + + ok = gen_tcp:send(Socket, [ + %% #1 + << 0:1, 0:3, 1:4, 1:1, 5:7 >>, + << 16#37 >>, << 16#fa >>, << 16#21 >>, << 16#3d >>, << 16#7f >>, + << 16#9f >>, << 16#4d >>, << 16#51 >>, << 16#58 >>, + %% #2 + << 0:1, 0:3, 0:4, 1:1, 5:7 >>, + << 16#37 >>, << 16#fa >>, << 16#21 >>, << 16#3d >>, << 16#7f >>, + << 16#9f >>, << 16#4d >>, << 16#51 >>, << 16#58 >>, + %% #3 + << 1:1, 0:3, 0:4, 1:1, 5:7 >>, + << 16#37 >>, << 16#fa >>, << 16#21 >>, << 16#3d >>, << 16#7f >>, + << 16#9f >>, << 16#4d >>, << 16#51 >>, << 16#58 >>]), + {ok, << 1:1, 0:3, 1:4, 0:1, 15:7, "HelloHelloHello" 
>>} + = gen_tcp:recv(Socket, 0, 6000), + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + +ws_timeout_hibernate(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_timeout_hibernate HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + {ok, << 1:1, 0:3, 8:4, 0:1, 2:7, 1000:16 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. 
+ +ws_timeout_cancel(Config) -> + %% Erlang messages to a socket should not cancel the timeout + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_timeout_cancel HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + {ok, << 1:1, 0:3, 8:4, 0:1, 2:7, 1000:16 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. 
+ +ws_timeout_reset(Config) -> + %% Erlang messages across a socket should reset the timeout + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_timeout_cancel HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-Websocket-Version: 13\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + [begin + ok = gen_tcp:send(Socket, << 16#81, 16#85, 16#37, 16#fa, 16#21, 16#3d, + 16#7f, 16#9f, 16#4d, 16#51, 16#58 >>), + {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>} + = gen_tcp:recv(Socket, 0, 6000), + ok = timer:sleep(500) + end || _ <- [1, 2, 3, 4]], + {ok, << 1:1, 0:3, 8:4, 0:1, 2:7, 1000:16 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. 
+ +ws_upgrade_with_opts(Config) -> + {port, Port} = lists:keyfind(port, 1, Config), + {ok, Socket} = gen_tcp:connect("localhost", Port, + [binary, {active, false}, {packet, raw}]), + ok = gen_tcp:send(Socket, [ + "GET /ws_upgrade_with_opts HTTP/1.1\r\n" + "Host: localhost\r\n" + "Connection: Upgrade\r\n" + "Upgrade: websocket\r\n" + "Sec-WebSocket-Origin: http://localhost\r\n" + "Sec-WebSocket-Version: 8\r\n" + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + "\r\n"]), + {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), + {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} + = erlang:decode_packet(http, Handshake, []), + [Headers, <<>>] = do_decode_headers( + erlang:decode_packet(httph, Rest, []), []), + {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), + {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), + {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} + = lists:keyfind("sec-websocket-accept", 1, Headers), + {ok, Response} = gen_tcp:recv(Socket, 9, 6000), + << 1:1, 0:3, 1:4, 0:1, 7:7, "success" >> = Response, + ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 1:1, 0:7, 0:32 >>), %% close + {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), + {error, closed} = gen_tcp:recv(Socket, 0, 6000), + ok. + +%% Internal. + +do_decode_headers({ok, http_eoh, Rest}, Acc) -> + [Acc, Rest]; +do_decode_headers({ok, {http_header, _I, Key, _R, Value}, Rest}, Acc) -> + F = fun(S) when is_atom(S) -> S; (S) -> string:to_lower(S) end, + do_decode_headers(erlang:decode_packet(httph, Rest, []), + [{F(Key), Value}|Acc]). 
+ +do_mask(<<>>, _, Acc) -> + Acc; +do_mask(<< O:32, Rest/bits >>, MaskKey, Acc) -> + T = O bxor MaskKey, + do_mask(Rest, MaskKey, << Acc/binary, T:32 >>); +do_mask(<< O:24 >>, MaskKey, Acc) -> + << MaskKey2:24, _:8 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:24 >>; +do_mask(<< O:16 >>, MaskKey, Acc) -> + << MaskKey2:16, _:16 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:16 >>; +do_mask(<< O:8 >>, MaskKey, Acc) -> + << MaskKey2:8, _:24 >> = << MaskKey:32 >>, + T = O bxor MaskKey2, + << Acc/binary, T:8 >>. diff --git a/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/client.json b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/client.json new file mode 100644 index 0000000..7899503 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/client.json @@ -0,0 +1,14 @@ +{ + "options": {"failByDrop": false}, + "enable-ssl": false, + + "servers": [{ + "agent": "Cowboy", + "url": "ws://localhost:33080/ws_echo", + "options": {"version": 18} + }], + + "cases": ["*"], + "exclude-cases": [], + "exclude-agent-cases": {} +} diff --git a/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_echo.erl b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_echo.erl new file mode 100644 index 0000000..d4a5f07 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_echo.erl @@ -0,0 +1,27 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(ws_echo). +-behaviour(cowboy_websocket_handler). +-export([init/3]). +-export([websocket_init/3, websocket_handle/3, + websocket_info/3, websocket_terminate/3]). + +init(_Any, _Req, _Opts) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_TransportName, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, undefined}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. 
+ +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler.erl b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_echo_timer.erl similarity index 71% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler.erl rename to rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_echo_timer.erl index abb4967..666a26d 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler.erl +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_echo_timer.erl @@ -1,24 +1,17 @@ %% Feel free to use, reuse and abuse the code in this file. --module(websocket_handler). --behaviour(cowboy_http_handler). --behaviour(cowboy_http_websocket_handler). --export([init/3, handle/2, terminate/2]). +-module(ws_echo_timer). +-behaviour(cowboy_websocket_handler). +-export([init/3]). -export([websocket_init/3, websocket_handle/3, websocket_info/3, websocket_terminate/3]). init(_Any, _Req, _Opts) -> - {upgrade, protocol, cowboy_http_websocket}. - -handle(_Req, _State) -> - exit(badarg). - -terminate(_Req, _State) -> - exit(badarg). + {upgrade, protocol, cowboy_websocket}. websocket_init(_TransportName, Req, _Opts) -> erlang:start_timer(1000, self(), <<"websocket_init">>), - Req2 = cowboy_http_req:compact(Req), + Req2 = cowboy_req:compact(Req), {ok, Req2, undefined}. 
websocket_handle({text, Data}, Req, State) -> diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler_init_shutdown.erl b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_init_shutdown.erl similarity index 55% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler_init_shutdown.erl rename to rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_init_shutdown.erl index aa9e056..2b1dd99 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/websocket_handler_init_shutdown.erl +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_init_shutdown.erl @@ -1,23 +1,16 @@ %% Feel free to use, reuse and abuse the code in this file. --module(websocket_handler_init_shutdown). --behaviour(cowboy_http_handler). --behaviour(cowboy_http_websocket_handler). --export([init/3, handle/2, terminate/2]). +-module(ws_init_shutdown). +-behaviour(cowboy_websocket_handler). +-export([init/3]). -export([websocket_init/3, websocket_handle/3, websocket_info/3, websocket_terminate/3]). init(_Any, _Req, _Opts) -> - {upgrade, protocol, cowboy_http_websocket}. - -handle(_Req, _State) -> - exit(badarg). - -terminate(_Req, _State) -> - exit(badarg). + {upgrade, protocol, cowboy_websocket}. websocket_init(_TransportName, Req, _Opts) -> - {ok, Req2} = cowboy_http_req:reply(403, Req), + {ok, Req2} = cowboy_req:reply(403, Req), {shutdown, Req2}. websocket_handle(_Frame, _Req, _State) -> diff --git a/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_send_many.erl b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_send_many.erl new file mode 100644 index 0000000..2ed4772 --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_send_many.erl @@ -0,0 +1,27 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(ws_send_many). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). 
+-export([websocket_terminate/3]). + +init(_Any, _Req, _Opts) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_TransportName, Req, Sequence) -> + Req2 = cowboy_req:compact(Req), + erlang:send_after(10, self(), send_many), + {ok, Req2, Sequence}. + +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(send_many, Req, State = [{sequence, Sequence}]) -> + {reply, Sequence, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. diff --git a/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_timeout_cancel.erl b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_timeout_cancel.erl new file mode 100644 index 0000000..9c7b72b --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_timeout_cancel.erl @@ -0,0 +1,26 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(ws_timeout_cancel). +-behaviour(cowboy_websocket_handler). +-export([init/3]). +-export([websocket_init/3, websocket_handle/3, + websocket_info/3, websocket_terminate/3]). + +init(_Any, _Req, _Opts) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_TransportName, Req, _Opts) -> + erlang:start_timer(500, self(), should_not_cancel_timer), + {ok, Req, undefined, 1000}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}. + +websocket_info(_Info, Req, State) -> + erlang:start_timer(500, self(), should_not_cancel_timer), + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_timeout_hibernate_handler.erl b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_timeout_hibernate.erl similarity index 61% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_timeout_hibernate_handler.erl rename to rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_timeout_hibernate.erl index 777948a..cc91e26 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_timeout_hibernate_handler.erl +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_timeout_hibernate.erl @@ -1,20 +1,13 @@ %% Feel free to use, reuse and abuse the code in this file. --module(ws_timeout_hibernate_handler). --behaviour(cowboy_http_handler). --behaviour(cowboy_http_websocket_handler). --export([init/3, handle/2, terminate/2]). +-module(ws_timeout_hibernate). +-behaviour(cowboy_websocket_handler). +-export([init/3]). -export([websocket_init/3, websocket_handle/3, websocket_info/3, websocket_terminate/3]). init(_Any, _Req, _Opts) -> - {upgrade, protocol, cowboy_http_websocket}. - -handle(_Req, _State) -> - exit(badarg). - -terminate(_Req, _State) -> - exit(badarg). + {upgrade, protocol, cowboy_websocket}. websocket_init(_TransportName, Req, _Opts) -> {ok, Req, undefined, 1000, hibernate}. diff --git a/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_upgrade_with_opts.erl b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_upgrade_with_opts.erl new file mode 100644 index 0000000..b4f82fa --- /dev/null +++ b/rabbitmq-server/deps/cowboy/test/ws_SUITE_data/ws_upgrade_with_opts.erl @@ -0,0 +1,28 @@ +%% Feel free to use, reuse and abuse the code in this file. + +-module(ws_upgrade_with_opts). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +init(_Any, Req, _Opts) -> + {upgrade, protocol, cowboy_websocket, Req, <<"success">>}. 
+ +websocket_init(_TransportName, Req, Response) -> + Req2 = cowboy_req:compact(Req), + erlang:send_after(10, self(), send_response), + {ok, Req2, Response}. + +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(send_response, Req, State = Response) + when is_binary(Response) -> + {reply, {text, Response}, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. diff --git a/rabbitmq-server/deps/cowlib/AUTHORS b/rabbitmq-server/deps/cowlib/AUTHORS new file mode 100644 index 0000000..824ec87 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/AUTHORS @@ -0,0 +1,4 @@ +Cowlib is available thanks to the work of: + +Loïc Hoguin +Mikkel Jensen diff --git a/rabbitmq-server/deps/cowlib/CHANGELOG.md b/rabbitmq-server/deps/cowlib/CHANGELOG.md new file mode 100644 index 0000000..88146e9 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/CHANGELOG.md @@ -0,0 +1,12 @@ +CHANGELOG +========= + +1.0.1 +----- + + * Multipart: no line break after close delimiter + +1.0.0 +----- + + * Initial release. diff --git a/rabbitmq-server/deps/cowlib/LICENSE b/rabbitmq-server/deps/cowlib/LICENSE new file mode 100644 index 0000000..e43ab78 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2013-2014, Loïc Hoguin + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/rabbitmq-server/deps/cowlib/Makefile b/rabbitmq-server/deps/cowlib/Makefile new file mode 100644 index 0000000..8b1d1b4 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/Makefile @@ -0,0 +1,43 @@ +# See LICENSE for licensing information. + +PROJECT = cowlib +PLT_APPS = crypto + +include erlang.mk + +.PHONY: gen perfs + +# Mimetypes module generator. + +GEN_URL = http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types +GEN_SRC = src/cow_mimetypes.erl.src +GEN_OUT = src/cow_mimetypes.erl + +gen: + $(gen_verbose) cat $(GEN_SRC) \ + | head -n `grep -n "%% GENERATED" $(GEN_SRC) | cut -d : -f 1` \ + > $(GEN_OUT) + $(gen_verbose) wget -qO - $(GEN_URL) \ + | grep -v ^# \ + | awk '{for (i=2; i<=NF; i++) if ($$i != "") { \ + split($$1, a, "/"); \ + print "all_ext(<<\"" $$i "\">>) -> {<<\"" \ + a[1] "\">>, <<\"" a[2] "\">>, []};"}}' \ + | sort \ + | uniq -w 25 \ + >> $(GEN_OUT) + $(gen_verbose) cat $(GEN_SRC) \ + | tail -n +`grep -n "%% GENERATED" $(GEN_SRC) | cut -d : -f 1` \ + >> $(GEN_OUT) + +# Performance testing. + +deps/horse: + git clone -n -- https://github.com/extend/horse $(DEPS_DIR)/horse + cd $(DEPS_DIR)/horse ; git checkout -q master + $(MAKE) -C $(DEPS_DIR)/horse + +perfs: ERLC_OPTS += -DPERF=1 +'{parse_transform, horse_autoexport}' -DEXTRA=1 +perfs: clean deps deps/horse app + $(gen_verbose) erl -noshell -pa ebin deps/horse/ebin \ + -eval 'horse:app_perf($(PROJECT)), init:stop().' diff --git a/rabbitmq-server/deps/cowlib/README.md b/rabbitmq-server/deps/cowlib/README.md new file mode 100644 index 0000000..9523836 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/README.md @@ -0,0 +1,20 @@ +Cowlib +====== + +Cowlib is a support library for manipulating Web protocols. + +Goals +----- + +Cowlib provides libraries for parsing and building messages +for various Web protocols, including SPDY, HTTP and Websocket. + +It is optimized for completeness rather than speed. No value +is ignored, they are all returned. 
+ +Support +------- + + * Official IRC Channel: #ninenines on irc.freenode.net + * [Mailing Lists](http://lists.ninenines.eu) + * [Commercial Support](http://ninenines.eu/support) diff --git a/rabbitmq-server/deps/cowlib/all.sh b/rabbitmq-server/deps/cowlib/all.sh new file mode 100755 index 0000000..fa9dd16 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/all.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +KERL_INSTALL_PATH=~/erlang +KERL_RELEASES="r15b r15b01 r15b02 r15b03 r16b r16b01 r16b02 r16b03-1 17.0 17.1.2" + +make build-ct-suites + +for rel in $KERL_RELEASES +do + echo + echo " TESTING $rel" + echo + . $KERL_INSTALL_PATH/$rel/activate + CT_OPTS="-label $rel" make tests +done + +xdg-open logs/all_runs.html diff --git a/rabbitmq-server/deps/cowlib/build.config b/rabbitmq-server/deps/cowlib/build.config new file mode 100644 index 0000000..87fd50d --- /dev/null +++ b/rabbitmq-server/deps/cowlib/build.config @@ -0,0 +1,20 @@ +# Core modules. +# +# Do *not* comment or remove them +# unless you know what you are doing! +core/core +core/deps +core/erlc + +# Plugins. +# +# Comment to disable, uncomment to enable. 
+plugins/bootstrap +#plugins/c_src +plugins/ct +plugins/dialyzer +#plugins/edoc +plugins/elvis +#plugins/erlydtl +#plugins/relx +plugins/shell diff --git a/rabbitmq-server/deps/cowlib/erlang.mk b/rabbitmq-server/deps/cowlib/erlang.mk new file mode 100644 index 0000000..8930dfc --- /dev/null +++ b/rabbitmq-server/deps/cowlib/erlang.mk @@ -0,0 +1 @@ +include ../../erlang.mk diff --git a/rabbitmq-server/deps/cowlib/include/cow_inline.hrl b/rabbitmq-server/deps/cowlib/include/cow_inline.hrl new file mode 100644 index 0000000..36a3558 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/include/cow_inline.hrl @@ -0,0 +1,388 @@ +%% Copyright (c) 2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-ifndef(COW_INLINE_HRL). +-define(COW_INLINE_HRL, 1). + +%% INLINE_LOWERCASE(Function, Rest, Acc, ...) +%% +%% To be included at the end of a case block. +%% Defined for up to 10 extra arguments. 
+ +-define(INLINE_LOWERCASE(Function, Rest, Acc), + $A -> Function(Rest, << Acc/binary, $a >>); + $B -> Function(Rest, << Acc/binary, $b >>); + $C -> Function(Rest, << Acc/binary, $c >>); + $D -> Function(Rest, << Acc/binary, $d >>); + $E -> Function(Rest, << Acc/binary, $e >>); + $F -> Function(Rest, << Acc/binary, $f >>); + $G -> Function(Rest, << Acc/binary, $g >>); + $H -> Function(Rest, << Acc/binary, $h >>); + $I -> Function(Rest, << Acc/binary, $i >>); + $J -> Function(Rest, << Acc/binary, $j >>); + $K -> Function(Rest, << Acc/binary, $k >>); + $L -> Function(Rest, << Acc/binary, $l >>); + $M -> Function(Rest, << Acc/binary, $m >>); + $N -> Function(Rest, << Acc/binary, $n >>); + $O -> Function(Rest, << Acc/binary, $o >>); + $P -> Function(Rest, << Acc/binary, $p >>); + $Q -> Function(Rest, << Acc/binary, $q >>); + $R -> Function(Rest, << Acc/binary, $r >>); + $S -> Function(Rest, << Acc/binary, $s >>); + $T -> Function(Rest, << Acc/binary, $t >>); + $U -> Function(Rest, << Acc/binary, $u >>); + $V -> Function(Rest, << Acc/binary, $v >>); + $W -> Function(Rest, << Acc/binary, $w >>); + $X -> Function(Rest, << Acc/binary, $x >>); + $Y -> Function(Rest, << Acc/binary, $y >>); + $Z -> Function(Rest, << Acc/binary, $z >>); + C -> Function(Rest, << Acc/binary, C >>) +). 
+ +-define(INLINE_LOWERCASE(Function, Rest, A0, Acc), + $A -> Function(Rest, A0, << Acc/binary, $a >>); + $B -> Function(Rest, A0, << Acc/binary, $b >>); + $C -> Function(Rest, A0, << Acc/binary, $c >>); + $D -> Function(Rest, A0, << Acc/binary, $d >>); + $E -> Function(Rest, A0, << Acc/binary, $e >>); + $F -> Function(Rest, A0, << Acc/binary, $f >>); + $G -> Function(Rest, A0, << Acc/binary, $g >>); + $H -> Function(Rest, A0, << Acc/binary, $h >>); + $I -> Function(Rest, A0, << Acc/binary, $i >>); + $J -> Function(Rest, A0, << Acc/binary, $j >>); + $K -> Function(Rest, A0, << Acc/binary, $k >>); + $L -> Function(Rest, A0, << Acc/binary, $l >>); + $M -> Function(Rest, A0, << Acc/binary, $m >>); + $N -> Function(Rest, A0, << Acc/binary, $n >>); + $O -> Function(Rest, A0, << Acc/binary, $o >>); + $P -> Function(Rest, A0, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, << Acc/binary, $q >>); + $R -> Function(Rest, A0, << Acc/binary, $r >>); + $S -> Function(Rest, A0, << Acc/binary, $s >>); + $T -> Function(Rest, A0, << Acc/binary, $t >>); + $U -> Function(Rest, A0, << Acc/binary, $u >>); + $V -> Function(Rest, A0, << Acc/binary, $v >>); + $W -> Function(Rest, A0, << Acc/binary, $w >>); + $X -> Function(Rest, A0, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, << Acc/binary, $z >>); + C -> Function(Rest, A0, << Acc/binary, C >>) +). 
+ +-define(INLINE_LOWERCASE(Function, Rest, A0, A1, Acc), + $A -> Function(Rest, A0, A1, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, << Acc/binary, C >>) +). 
+ +-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, Acc), + $A -> Function(Rest, A0, A1, A2, << Acc/binary, $a >>); + $B -> Function(Rest, A0, A1, A2, << Acc/binary, $b >>); + $C -> Function(Rest, A0, A1, A2, << Acc/binary, $c >>); + $D -> Function(Rest, A0, A1, A2, << Acc/binary, $d >>); + $E -> Function(Rest, A0, A1, A2, << Acc/binary, $e >>); + $F -> Function(Rest, A0, A1, A2, << Acc/binary, $f >>); + $G -> Function(Rest, A0, A1, A2, << Acc/binary, $g >>); + $H -> Function(Rest, A0, A1, A2, << Acc/binary, $h >>); + $I -> Function(Rest, A0, A1, A2, << Acc/binary, $i >>); + $J -> Function(Rest, A0, A1, A2, << Acc/binary, $j >>); + $K -> Function(Rest, A0, A1, A2, << Acc/binary, $k >>); + $L -> Function(Rest, A0, A1, A2, << Acc/binary, $l >>); + $M -> Function(Rest, A0, A1, A2, << Acc/binary, $m >>); + $N -> Function(Rest, A0, A1, A2, << Acc/binary, $n >>); + $O -> Function(Rest, A0, A1, A2, << Acc/binary, $o >>); + $P -> Function(Rest, A0, A1, A2, << Acc/binary, $p >>); + $Q -> Function(Rest, A0, A1, A2, << Acc/binary, $q >>); + $R -> Function(Rest, A0, A1, A2, << Acc/binary, $r >>); + $S -> Function(Rest, A0, A1, A2, << Acc/binary, $s >>); + $T -> Function(Rest, A0, A1, A2, << Acc/binary, $t >>); + $U -> Function(Rest, A0, A1, A2, << Acc/binary, $u >>); + $V -> Function(Rest, A0, A1, A2, << Acc/binary, $v >>); + $W -> Function(Rest, A0, A1, A2, << Acc/binary, $w >>); + $X -> Function(Rest, A0, A1, A2, << Acc/binary, $x >>); + $Y -> Function(Rest, A0, A1, A2, << Acc/binary, $y >>); + $Z -> Function(Rest, A0, A1, A2, << Acc/binary, $z >>); + C -> Function(Rest, A0, A1, A2, << Acc/binary, C >>) +). 
+
+%% Same lowercasing case-clause fragment, for callers threading four
+%% extra arguments A0..A3.
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, Acc),
+    C when C >= $A, C =< $Z ->
+        Function(Rest, A0, A1, A2, A3, << Acc/binary, (C + 32) >>);
+    C ->
+        Function(Rest, A0, A1, A2, A3, << Acc/binary, C >>)
+).
+
+%% Same lowercasing case-clause fragment, for callers threading five
+%% extra arguments A0..A4.
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, Acc),
+    C when C >= $A, C =< $Z ->
+        Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, (C + 32) >>);
+    C ->
+        Function(Rest, A0, A1, A2, A3, A4, << Acc/binary, C >>)
+).
+
+%% Same lowercasing case-clause fragment, for callers threading six
+%% extra arguments A0..A5.
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, Acc),
+    C when C >= $A, C =< $Z ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, (C + 32) >>);
+    C ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, << Acc/binary, C >>)
+).
+
+%% Same lowercasing case-clause fragment, for callers threading seven
+%% extra arguments A0..A6.
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, A6, Acc),
+    C when C >= $A, C =< $Z ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, (C + 32) >>);
+    C ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, A6, << Acc/binary, C >>)
+).
+
+%% Same, for eight extra arguments A0..A7.
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, Acc),
+    C when C >= $A, C =< $Z ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, (C + 32) >>);
+    C ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, << Acc/binary, C >>)
+).
+
+%% Same, for nine extra arguments A0..A8.
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, Acc),
+    C when C >= $A, C =< $Z ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, (C + 32) >>);
+    C ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, << Acc/binary, C >>)
+).
+
+%% Same, for ten extra arguments A0..A9.
+-define(INLINE_LOWERCASE(Function, Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, Acc),
+    C when C >= $A, C =< $Z ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, (C + 32) >>);
+    C ->
+        Function(Rest, A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, << Acc/binary, C >>)
+).
+
+%% INLINE_LOWERCASE_BC(Bin)
+%%
+%% Lowercase the entire binary string in a binary comprehension.
+
+-define(INLINE_LOWERCASE_BC(Bin),
+    << << (case C of
+        UC when UC >= $A, UC =< $Z -> UC + 32;
+        UC -> UC
+    end) >> || << C >> <= Bin >>).
+
+-endif.
diff --git a/rabbitmq-server/deps/cowlib/src/cow_cookie.erl b/rabbitmq-server/deps/cowlib/src/cow_cookie.erl
new file mode 100644
index 0000000..6db89be
--- /dev/null
+++ b/rabbitmq-server/deps/cowlib/src/cow_cookie.erl
@@ -0,0 +1,267 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_cookie).
+
+-export([parse_cookie/1]).
+-export([setcookie/3]).
+
+-type cookie_option() :: {max_age, non_neg_integer()}
+    | {domain, binary()} | {path, binary()}
+    | {secure, boolean()} | {http_only, boolean()}.
+-type cookie_opts() :: [cookie_option()].
+-export_type([cookie_opts/0]).
+
+%% @doc Parse a cookie header string and return a list of key/values.
+
+-spec parse_cookie(binary()) -> [{binary(), binary()}] | {error, badarg}.
+parse_cookie(Cookie) ->
+    parse_cookie(Cookie, []).
+
+%% Between pairs: whitespace and the `,`/`;` separators are skipped.
+parse_cookie(<<>>, Acc) ->
+    lists:reverse(Acc);
+parse_cookie(<< C, Rest/binary >>, Acc)
+        when C =:= $\s; C =:= $\t; C =:= $,; C =:= $; ->
+    parse_cookie(Rest, Acc);
+parse_cookie(<< $$, Rest/binary >>, Acc) ->
+    %% `$`-prefixed attributes (RFC 2109 style, e.g. $Version, $Path)
+    %% are not cookies; drop them entirely.
+    skip_cookie(Rest, Acc);
+parse_cookie(Cookie, Acc) ->
+    parse_cookie_name(Cookie, Acc, <<>>).
+
+%% Consume bytes up to the next pair separator (or end of input).
+skip_cookie(<<>>, Acc) ->
+    lists:reverse(Acc);
+skip_cookie(<< C, Rest/binary >>, Acc) when C =:= $,; C =:= $; ->
+    parse_cookie(Rest, Acc);
+skip_cookie(<< _, Rest/binary >>, Acc) ->
+    skip_cookie(Rest, Acc).
+
+%% Accumulate the cookie name until `=`. An empty name, a name that
+%% never reaches `=`, or a name containing a separator/whitespace/control
+%% byte is a parse error.
+parse_cookie_name(<<>>, _, _) ->
+    {error, badarg};
+parse_cookie_name(<< $=, _/binary >>, _, <<>>) ->
+    {error, badarg};
+parse_cookie_name(<< $=, Rest/binary >>, Acc, Name) ->
+    parse_cookie_value(Rest, Acc, Name, <<>>);
+parse_cookie_name(<< C, _/binary >>, _, _)
+        when C =:= $,; C =:= $;; C =:= $\s; C =:= $\t;
+            C =:= $\r; C =:= $\n; C =:= $\013; C =:= $\014 ->
+    {error, badarg};
+parse_cookie_name(<< C, Rest/binary >>, Acc, Name) ->
+    parse_cookie_name(Rest, Acc, << Name/binary, C >>).
+
+%% Accumulate a cookie value until end of input or `;`. Unlike names,
+%% values may contain `,`, `=` and spaces (trailing spaces are trimmed);
+%% tab and control bytes are still parse errors.
+parse_cookie_value(<<>>, Acc, Name, Value) ->
+    lists:reverse([{Name, parse_cookie_trim(Value)}|Acc]);
+parse_cookie_value(<< $;, Rest/binary >>, Acc, Name, Value) ->
+    parse_cookie(Rest, [{Name, parse_cookie_trim(Value)}|Acc]);
+parse_cookie_value(<< $\t, _/binary >>, _, _, _) ->
+    {error, badarg};
+parse_cookie_value(<< $\r, _/binary >>, _, _, _) ->
+    {error, badarg};
+parse_cookie_value(<< $\n, _/binary >>, _, _, _) ->
+    {error, badarg};
+parse_cookie_value(<< $\013, _/binary >>, _, _, _) ->
+    {error, badarg};
+parse_cookie_value(<< $\014, _/binary >>, _, _, _) ->
+    {error, badarg};
+parse_cookie_value(<< C, Rest/binary >>, Acc, Name, Value) ->
+    parse_cookie_value(Rest, Acc, Name, << Value/binary, C >>).
+
+%% Strip trailing spaces from a value, one byte at a time.
+parse_cookie_trim(Value = <<>>) ->
+    Value;
+parse_cookie_trim(Value) ->
+    case binary:last(Value) of
+        $\s ->
+            Size = byte_size(Value) - 1,
+            << Value2:Size/binary, _ >> = Value,
+            parse_cookie_trim(Value2);
+        _ ->
+            Value
+    end.
+
+-ifdef(TEST).
+parse_cookie_test_() ->
+    %% {Value, Result}.
+    Tests = [
+        {<<"name=value; name2=value2">>, [
+            {<<"name">>, <<"value">>},
+            {<<"name2">>, <<"value2">>}
+        ]},
+        {<<"$Version=1; Customer=WILE_E_COYOTE; $Path=/acme">>, [
+            {<<"Customer">>, <<"WILE_E_COYOTE">>}
+        ]},
+        {<<"$Version=1; Customer=WILE_E_COYOTE; $Path=/acme; "
+            "Part_Number=Rocket_Launcher_0001; $Path=/acme; "
+            "Shipping=FedEx; $Path=/acme">>, [
+            {<<"Customer">>, <<"WILE_E_COYOTE">>},
+            {<<"Part_Number">>, <<"Rocket_Launcher_0001">>},
+            {<<"Shipping">>, <<"FedEx">>}
+        ]},
+        %% Space in value.
+        {<<"foo=Thu Jul 11 2013 15:38:43 GMT+0400 (MSK)">>,
+            [{<<"foo">>, <<"Thu Jul 11 2013 15:38:43 GMT+0400 (MSK)">>}]},
+        %% Comma in value. Google Analytics sets that kind of cookies.
+        {<<"refk=sOUZDzq2w2; sk=B602064E0139D842D620C7569640DBB4C81C45080651"
+            "9CC124EF794863E10E80; __utma=64249653.825741573.1380181332.1400"
+            "015657.1400019557.703; __utmb=64249653.1.10.1400019557; __utmc="
+            "64249653; __utmz=64249653.1400019557.703.13.utmcsr=bluesky.chic"
+            "agotribune.com|utmccn=(referral)|utmcmd=referral|utmcct=/origin"
+            "als/chi-12-indispensable-digital-tools-bsi,0,0.storygallery">>, [
+            {<<"refk">>, <<"sOUZDzq2w2">>},
+            {<<"sk">>, <<"B602064E0139D842D620C7569640DBB4C81C45080651"
+                "9CC124EF794863E10E80">>},
+            {<<"__utma">>, <<"64249653.825741573.1380181332.1400"
+                "015657.1400019557.703">>},
+            {<<"__utmb">>, <<"64249653.1.10.1400019557">>},
+            {<<"__utmc">>, <<"64249653">>},
+            {<<"__utmz">>, <<"64249653.1400019557.703.13.utmcsr=bluesky.chic"
+                "agotribune.com|utmccn=(referral)|utmcmd=referral|utmcct=/origin"
+                "als/chi-12-indispensable-digital-tools-bsi,0,0.storygallery">>}
+        ]},
+        %% Potential edge cases (initially from Mochiweb).
+        {<<"foo=\\x">>, [{<<"foo">>, <<"\\x">>}]},
+        {<<"=">>, {error, badarg}},
+        {<<" foo ; bar ">>, {error, badarg}},
+        {<<"foo=;bar=">>, [{<<"foo">>, <<>>}, {<<"bar">>, <<>>}]},
+        {<<"foo=\\\";;bar ">>, {error, badarg}},
+        {<<"foo=\\\";;bar=good ">>,
+            [{<<"foo">>, <<"\\\"">>}, {<<"bar">>, <<"good">>}]},
+        {<<"foo=\"\\\";bar">>, {error, badarg}},
+        {<<>>, []},
+        {<<"foo=bar , baz=wibble ">>, [{<<"foo">>, <<"bar , baz=wibble">>}]}
+    ],
+    [{V, fun() -> R = parse_cookie(V) end} || {V, R} <- Tests].
+-endif.
+
+%% @doc Convert a cookie name, value and options to its iodata form.
+%% @end
+%%
+%% Initially from Mochiweb:
+%% * Copyright 2007 Mochi Media, Inc.
+%% Initial binary implementation:
+%% * Copyright 2011 Thomas Burdick
+
+-spec setcookie(iodata(), iodata(), cookie_opts()) -> iodata().
+setcookie(Name, Value, Opts) ->
+    %% Deliberately crash (badmatch on the found position) if the name
+    %% or value contains a byte that would corrupt the Set-Cookie line.
+    nomatch = binary:match(iolist_to_binary(Name), [<<$=>>, <<$,>>, <<$;>>,
+        <<$\s>>, <<$\t>>, <<$\r>>, <<$\n>>, <<$\013>>, <<$\014>>]),
+    nomatch = binary:match(iolist_to_binary(Value), [<<$,>>, <<$;>>,
+        <<$\s>>, <<$\t>>, <<$\r>>, <<$\n>>, <<$\013>>, <<$\014>>]),
+    MaxAgeBin = case lists:keyfind(max_age, 1, Opts) of
+        false -> <<>>;
+        {_, 0} ->
+            %% MSIE requires an Expires date in the past to delete a cookie.
+            <<"; Expires=Thu, 01-Jan-1970 00:00:01 GMT; Max-Age=0">>;
+        {_, MaxAge} when is_integer(MaxAge), MaxAge > 0 ->
+            %% Emit both Expires (computed from now) and Max-Age.
+            %% A negative max_age falls through no clause and raises
+            %% case_clause (pinned by setcookie_max_age_test below).
+            UTC = calendar:universal_time(),
+            Secs = calendar:datetime_to_gregorian_seconds(UTC),
+            Expires = calendar:gregorian_seconds_to_datetime(Secs + MaxAge),
+            [<<"; Expires=">>, cow_date:rfc2109(Expires),
+                <<"; Max-Age=">>, integer_to_list(MaxAge)]
+    end,
+    DomainBin = case lists:keyfind(domain, 1, Opts) of
+        false -> <<>>;
+        {_, Domain} -> [<<"; Domain=">>, Domain]
+    end,
+    PathBin = case lists:keyfind(path, 1, Opts) of
+        false -> <<>>;
+        {_, Path} -> [<<"; Path=">>, Path]
+    end,
+    %% NOTE(review): {secure, false} / {http_only, false} hit no case
+    %% clause and crash; only absence or `true` is supported — confirm
+    %% against upstream cowlib before relying on it.
+    SecureBin = case lists:keyfind(secure, 1, Opts) of
+        false -> <<>>;
+        {_, true} -> <<"; Secure">>
+    end,
+    HttpOnlyBin = case lists:keyfind(http_only, 1, Opts) of
+        false -> <<>>;
+        {_, true} -> <<"; HttpOnly">>
+    end,
+    [Name, <<"=">>, Value, <<"; Version=1">>,
+        MaxAgeBin, DomainBin, PathBin, SecureBin, HttpOnlyBin].
+
+-ifdef(TEST).
+setcookie_test_() ->
+    %% {Name, Value, Opts, Result}
+    Tests = [
+        {<<"Customer">>, <<"WILE_E_COYOTE">>,
+            [{http_only, true}, {domain, <<"acme.com">>}],
+            <<"Customer=WILE_E_COYOTE; Version=1; "
+                "Domain=acme.com; HttpOnly">>},
+        {<<"Customer">>, <<"WILE_E_COYOTE">>,
+            [{path, <<"/acme">>}],
+            <<"Customer=WILE_E_COYOTE; Version=1; Path=/acme">>},
+        {<<"Customer">>, <<"WILE_E_COYOTE">>,
+            [{path, <<"/acme">>}, {badoption, <<"negatory">>}],
+            <<"Customer=WILE_E_COYOTE; Version=1; Path=/acme">>}
+    ],
+    [{R, fun() -> R = iolist_to_binary(setcookie(N, V, O)) end}
+        || {N, V, O, R} <- Tests].
+
+setcookie_max_age_test() ->
+    F = fun(N, V, O) ->
+        binary:split(iolist_to_binary(
+            setcookie(N, V, O)), <<";">>, [global])
+    end,
+    [<<"Customer=WILE_E_COYOTE">>,
+        <<" Version=1">>,
+        <<" Expires=", _/binary>>,
+        <<" Max-Age=111">>,
+        <<" Secure">>] = F(<<"Customer">>, <<"WILE_E_COYOTE">>,
+            [{max_age, 111}, {secure, true}]),
+    %% A negative max_age must crash with case_clause (see setcookie/3).
+    case catch F(<<"Customer">>, <<"WILE_E_COYOTE">>, [{max_age, -111}]) of
+        {'EXIT', {{case_clause, {max_age, -111}}, _}} -> ok
+    end,
+    [<<"Customer=WILE_E_COYOTE">>,
+        <<" Version=1">>,
+        <<" Expires=", _/binary>>,
+        <<" Max-Age=86417">>] = F(<<"Customer">>, <<"WILE_E_COYOTE">>,
+            [{max_age, 86417}]),
+    ok.
+
+%% Every name/value pair below contains a forbidden byte and must make
+%% setcookie/3 crash (caught and reported as `true`).
+setcookie_failures_test_() ->
+    F = fun(N, V) ->
+        try setcookie(N, V, []) of
+            _ ->
+                false
+        catch _:_ ->
+            true
+        end
+    end,
+    Tests = [
+        {<<"Na=me">>, <<"Value">>},
+        {<<"Name;">>, <<"Value">>},
+        {<<"\r\name">>, <<"Value">>},
+        {<<"Name">>, <<"Value;">>},
+        {<<"Name">>, <<"\value">>}
+    ],
+    [{iolist_to_binary(io_lib:format("{~p, ~p} failure", [N, V])),
+        fun() -> true = F(N, V) end}
+        || {N, V} <- Tests].
+-endif.
diff --git a/rabbitmq-server/deps/cowlib/src/cow_date.erl b/rabbitmq-server/deps/cowlib/src/cow_date.erl
new file mode 100644
index 0000000..f794c82
--- /dev/null
+++ b/rabbitmq-server/deps/cowlib/src/cow_date.erl
@@ -0,0 +1,206 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_date).
+
+-export([rfc2109/1]).
+
+%% @doc Return the date formatted according to RFC2109.
+%% Format: "Wdy, DD-Mon-YYYY HH:MM:SS GMT".
+
+-spec rfc2109(calendar:datetime()) -> binary().
+rfc2109({Date = {Y, Mo, D}, {H, Mi, S}}) ->
+    Wday = calendar:day_of_the_week(Date),
+    << (weekday(Wday))/binary, ", ",
+        (pad_int(D))/binary, "-",
+        (month(Mo))/binary, "-",
+        (year(Y))/binary, " ",
+        (pad_int(H))/binary, ":",
+        (pad_int(Mi))/binary, ":",
+        (pad_int(S))/binary, " GMT" >>.
+
+-ifdef(TEST).
+rfc2109_test_() ->
+    Tests = [
+        {<<"Sat, 14-May-2011 14:25:33 GMT">>, {{2011, 5, 14}, {14, 25, 33}}},
+        {<<"Sun, 01-Jan-2012 00:00:00 GMT">>, {{2012, 1, 1}, { 0, 0, 0}}}
+    ],
+    [{R, fun() -> R = rfc2109(D) end} || {R, D} <- Tests].
+-endif.
+
+-ifdef(PERF).
+%% NOTE(review): "rfc2019" in these benchmark names looks like a typo
+%% for rfc2109 — harmless (PERF-only), confirm against upstream cowlib.
+horse_rfc2019_20130101_000000() ->
+    horse:repeat(100000,
+        rfc2109({{2013, 1, 1}, {0, 0, 0}})
+    ).
+
+horse_rfc2019_20131231_235959() ->
+    horse:repeat(100000,
+        rfc2109({{2013, 12, 31}, {23, 59, 59}})
+    ).
+
+horse_rfc2019_12340506_070809() ->
+    horse:repeat(100000,
+        rfc2109({{1234, 5, 6}, {7, 8, 9}})
+    ).
+-endif.
+
+%% Internal.
+
+-spec pad_int(0..59) -> <<_:16>>.
+%% Zero-pad an integer in 0..59 to exactly two ASCII digits. The guards
+%% keep out-of-range input raising function_clause, as the original
+%% fully unrolled clause table did.
+pad_int(I) when I >= 0, I =< 9 ->
+    << $0, ($0 + I) >>;
+pad_int(I) when I >= 10, I =< 59 ->
+    integer_to_binary(I).
+
+-spec weekday(1..7) -> <<_:24>>.
+%% Three-letter English weekday abbreviation, 1 = Monday.
+weekday(Wday) when Wday >= 1, Wday =< 7 ->
+    element(Wday, {<<"Mon">>, <<"Tue">>, <<"Wed">>, <<"Thu">>,
+        <<"Fri">>, <<"Sat">>, <<"Sun">>}).
+
+-spec month(1..12) -> <<_:24>>.
+%% Three-letter English month abbreviation; the guard preserves the
+%% function_clause error of the original per-month clauses.
+month(Mo) when Mo >= 1, Mo =< 12 ->
+    element(Mo, {<<"Jan">>, <<"Feb">>, <<"Mar">>, <<"Apr">>,
+        <<"May">>, <<"Jun">>, <<"Jul">>, <<"Aug">>,
+        <<"Sep">>, <<"Oct">>, <<"Nov">>, <<"Dec">>}).
+
+-spec year(pos_integer()) -> <<_:32>>.
+%% The original unrolled 1970..2029 as a lookup fast path before a
+%% list_to_binary(integer_to_list(Year)) fallback; integer_to_binary/1
+%% produces byte-identical output for every integer year.
+year(Year) ->
+    integer_to_binary(Year).
diff --git a/rabbitmq-server/deps/cowlib/src/cow_http.erl b/rabbitmq-server/deps/cowlib/src/cow_http.erl
new file mode 100644
index 0000000..f7e3cdd
--- /dev/null
+++ b/rabbitmq-server/deps/cowlib/src/cow_http.erl
@@ -0,0 +1,301 @@
+%% Copyright (c) 2013-2014, Loïc Hoguin
+%%
+%% Permission to use, copy, modify, and/or distribute this software for any
+%% purpose with or without fee is hereby granted, provided that the above
+%% copyright notice and this permission notice appear in all copies.
+%%
+%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+-module(cow_http).
+
+%% @todo parse_request_line
+-export([parse_status_line/1]).
+-export([parse_headers/1]).
+
+-export([parse_fullhost/1]).
+-export([parse_fullpath/1]).
+-export([parse_version/1]).
+
+-export([request/4]).
+-export([version/1]).
+
+-type version() :: 'HTTP/1.0' | 'HTTP/1.1'.
+-type status() :: 100..999.
+-type headers() :: [{binary(), iodata()}].
+
+-include("cow_inline.hrl").
+
+%% @doc Parse the status line.
+
+-spec parse_status_line(binary()) -> {version(), status(), binary(), binary()}.
%% Fast paths: the three most common status lines are matched as literals.
parse_status_line(<< "HTTP/1.1 200 OK\r\n", Rest/bits >>) ->
	{'HTTP/1.1', 200, <<"OK">>, Rest};
parse_status_line(<< "HTTP/1.1 404 Not Found\r\n", Rest/bits >>) ->
	{'HTTP/1.1', 404, <<"Not Found">>, Rest};
parse_status_line(<< "HTTP/1.1 500 Internal Server Error\r\n", Rest/bits >>) ->
	{'HTTP/1.1', 500, <<"Internal Server Error">>, Rest};
parse_status_line(<< "HTTP/1.1 ", Status/bits >>) ->
	parse_status_line(Status, 'HTTP/1.1');
parse_status_line(<< "HTTP/1.0 ", Status/bits >>) ->
	parse_status_line(Status, 'HTTP/1.0').

%% Generic path: exactly three digits, a space, then the reason phrase up
%% to the terminating CRLF. Malformed input fails to match and crashes,
%% which is the intended error behavior for this parser.
parse_status_line(<< H, T, U, " ", Rest/bits >>, Version)
		when $0 =< H, H =< $9, $0 =< T, T =< $9, $0 =< U, U =< $9 ->
	Status = (H - $0) * 100 + (T - $0) * 10 + (U - $0),
	{Pos, _} = binary:match(Rest, <<"\r">>),
	<< StatusStr:Pos/binary, "\r\n", Rest2/bits >> = Rest,
	{Version, Status, StatusStr, Rest2}.

-ifdef(TEST).
parse_status_line_test_() ->
	Tests = [
		{<<"HTTP/1.1 200 OK\r\nRest">>,
			{'HTTP/1.1', 200, <<"OK">>, <<"Rest">>}},
		{<<"HTTP/1.0 404 Not Found\r\nRest">>,
			{'HTTP/1.0', 404, <<"Not Found">>, <<"Rest">>}},
		{<<"HTTP/1.1 500 Something very funny here\r\nRest">>,
			{'HTTP/1.1', 500, <<"Something very funny here">>, <<"Rest">>}},
		{<<"HTTP/1.1 200 \r\nRest">>,
			{'HTTP/1.1', 200, <<>>, <<"Rest">>}}
	],
	[{V, fun() -> R = parse_status_line(V) end}
		|| {V, R} <- Tests].

parse_status_line_error_test_() ->
	Tests = [
		<<>>,
		<<"HTTP/1.1">>,
		<<"HTTP/1.1 200\r\n">>,
		<<"HTTP/1.1 200 OK">>,
		<<"HTTP/1.1 200 OK\r">>,
		<<"HTTP/1.1 200 OK\n">>,
		<<"HTTP/0.9 200 OK\r\n">>,
		<<"HTTP/1.1 42 Answer\r\n">>,
		<<"HTTP/1.1 999999999 More than OK\r\n">>,
		<<"content-type: text/plain\r\n">>,
		<<0:80, "\r\n">>
	],
	[{V, fun() -> {'EXIT', _} = (catch parse_status_line(V)) end}
		|| V <- Tests].
-endif.

-ifdef(PERF).
horse_parse_status_line_200() ->
	horse:repeat(200000,
		parse_status_line(<<"HTTP/1.1 200 OK\r\n">>)
	).

horse_parse_status_line_404() ->
	horse:repeat(200000,
		parse_status_line(<<"HTTP/1.1 404 Not Found\r\n">>)
	).

horse_parse_status_line_500() ->
	horse:repeat(200000,
		parse_status_line(<<"HTTP/1.1 500 Internal Server Error\r\n">>)
	).

horse_parse_status_line_other() ->
	horse:repeat(200000,
		parse_status_line(<<"HTTP/1.1 416 Requested range not satisfiable\r\n">>)
	).
-endif.

%% @doc Parse the list of headers.
%%
%% Header names are lowercased while parsing (via ?INLINE_LOWERCASE from
%% cow_inline.hrl) so lookups can be done case-insensitively.

-spec parse_headers(binary()) -> {[{binary(), binary()}], binary()}.
parse_headers(Data) ->
	parse_header(Data, []).

%% A bare CRLF terminates the header block; everything after it is
%% returned untouched as the rest of the stream.
parse_header(<< $\r, $\n, Rest/bits >>, Acc) ->
	{lists:reverse(Acc), Rest};
parse_header(Data, Acc) ->
	parse_hd_name(Data, Acc, <<>>).

parse_hd_name(<< C, Rest/bits >>, Acc, SoFar) ->
	case C of
		$: -> parse_hd_before_value(Rest, Acc, SoFar);
		$\s -> parse_hd_name_ws(Rest, Acc, SoFar);
		$\t -> parse_hd_name_ws(Rest, Acc, SoFar);
		?INLINE_LOWERCASE(parse_hd_name, Rest, Acc, SoFar)
	end.

%% Whitespace between the name and the colon is skipped; any other
%% character here is malformed and crashes the parse.
parse_hd_name_ws(<< C, Rest/bits >>, Acc, Name) ->
	case C of
		$: -> parse_hd_before_value(Rest, Acc, Name);
		$\s -> parse_hd_name_ws(Rest, Acc, Name);
		$\t -> parse_hd_name_ws(Rest, Acc, Name)
	end.

%% Skip optional whitespace after the colon, before the value.
parse_hd_before_value(<< $\s, Rest/bits >>, Acc, Name) ->
	parse_hd_before_value(Rest, Acc, Name);
parse_hd_before_value(<< $\t, Rest/bits >>, Acc, Name) ->
	parse_hd_before_value(Rest, Acc, Name);
parse_hd_before_value(Data, Acc, Name) ->
	parse_hd_value(Data, Acc, Name, <<>>).

%% A CRLF followed by SP/HTAB is an obsolete line fold: the value
%% continues on the next line, keeping that single whitespace character.
parse_hd_value(<< $\r, Rest/bits >>, Acc, Name, SoFar) ->
	case Rest of
		<< $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t ->
			parse_hd_value(Rest2, Acc, Name, << SoFar/binary, C >>);
		<< $\n, Rest2/bits >> ->
			parse_header(Rest2, [{Name, SoFar}|Acc])
	end;
parse_hd_value(<< C, Rest/bits >>, Acc, Name, SoFar) ->
	parse_hd_value(Rest, Acc, Name, << SoFar/binary, C >>).

-ifdef(TEST).
parse_headers_test_() ->
	Tests = [
		{<<"\r\nRest">>,
			{[], <<"Rest">>}},
		{<<"Server: Erlang/R17\r\n"
			"Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
			"Multiline-Header: why hello!\r\n"
				" I didn't see you all the way over there!\r\n"
			"Content-Length: 12\r\n"
			"Content-Type: text/plain\r\n"
			"\r\nRest">>,
			{[{<<"server">>, <<"Erlang/R17">>},
				{<<"date">>, <<"Sun, 23 Feb 2014 09:30:39 GMT">>},
				{<<"multiline-header">>,
					<<"why hello! I didn't see you all the way over there!">>},
				{<<"content-length">>, <<"12">>},
				{<<"content-type">>, <<"text/plain">>}],
				<<"Rest">>}}
	],
	[{V, fun() -> R = parse_headers(V) end}
		|| {V, R} <- Tests].

parse_headers_error_test_() ->
	Tests = [
		<<>>,
		<<"\r">>,
		<<"Malformed\r\n\r\n">>,
		<<"content-type: text/plain\r\nMalformed\r\n\r\n">>,
		<<"HTTP/1.1 200 OK\r\n\r\n">>,
		<<0:80, "\r\n\r\n">>,
		<<"content-type: text/plain\r\ncontent-length: 12\r\n">>
	],
	[{V, fun() -> {'EXIT', _} = (catch parse_headers(V)) end}
		|| V <- Tests].
-endif.

-ifdef(PERF).
horse_parse_headers() ->
	horse:repeat(50000,
		parse_headers(<<"Server: Erlang/R17\r\n"
			"Date: Sun, 23 Feb 2014 09:30:39 GMT\r\n"
			"Multiline-Header: why hello!\r\n"
				" I didn't see you all the way over there!\r\n"
			"Content-Length: 12\r\n"
			"Content-Type: text/plain\r\n"
			"\r\nRest">>)
	).
-endif.

%% @doc Extract host and port from a binary.
%%
%% Because the hostname is case insensitive it is converted
%% to lowercase.
%%
%% The boolean accumulator flag tracks whether we are inside an
%% IPv6 bracket literal, in which case a colon is part of the
%% address rather than the port separator.

-spec parse_fullhost(binary()) -> {binary(), undefined | non_neg_integer()}.
parse_fullhost(Fullhost) ->
	parse_fullhost(Fullhost, false, <<>>).

parse_fullhost(<< $[, Rest/bits >>, false, <<>>) ->
	parse_fullhost(Rest, true, << $[ >>);
parse_fullhost(<<>>, false, Acc) ->
	{Acc, undefined};
%% @todo Optimize.
parse_fullhost(<< $:, Rest/bits >>, false, Acc) ->
	{Acc, list_to_integer(binary_to_list(Rest))};
parse_fullhost(<< $], Rest/bits >>, true, Acc) ->
	parse_fullhost(Rest, false, << Acc/binary, $] >>);
parse_fullhost(<< C, Rest/bits >>, E, Acc) ->
	case C of
		?INLINE_LOWERCASE(parse_fullhost, Rest, E, Acc)
	end.

-ifdef(TEST).
parse_fullhost_test() ->
	{<<"example.org">>, 8080} = parse_fullhost(<<"example.org:8080">>),
	{<<"example.org">>, undefined} = parse_fullhost(<<"example.org">>),
	{<<"192.0.2.1">>, 8080} = parse_fullhost(<<"192.0.2.1:8080">>),
	{<<"192.0.2.1">>, undefined} = parse_fullhost(<<"192.0.2.1">>),
	{<<"[2001:db8::1]">>, 8080} = parse_fullhost(<<"[2001:db8::1]:8080">>),
	{<<"[2001:db8::1]">>, undefined} = parse_fullhost(<<"[2001:db8::1]">>),
	{<<"[::ffff:192.0.2.1]">>, 8080}
		= parse_fullhost(<<"[::ffff:192.0.2.1]:8080">>),
	{<<"[::ffff:192.0.2.1]">>, undefined}
		= parse_fullhost(<<"[::ffff:192.0.2.1]">>),
	ok.
-endif.

%% @doc Extract path and query string from a binary.
%% Splits at the first $? character; the $? itself is dropped.

-spec parse_fullpath(binary()) -> {binary(), binary()}.
parse_fullpath(Fullpath) ->
	parse_fullpath(Fullpath, <<>>).

parse_fullpath(<<>>, Path) ->
	{Path, <<>>};
parse_fullpath(<< $?, Qs/binary >>, Path) ->
	{Path, Qs};
parse_fullpath(<< C, Rest/binary >>, SoFar) ->
	parse_fullpath(Rest, << SoFar/binary, C >>).

-ifdef(TEST).
parse_fullpath_test() ->
	{<<"*">>, <<>>} = parse_fullpath(<<"*">>),
	{<<"/">>, <<>>} = parse_fullpath(<<"/">>),
	{<<"/path/to/resource">>, <<>>} = parse_fullpath(<<"/path/to/resource">>),
	{<<"/">>, <<>>} = parse_fullpath(<<"/?">>),
	{<<"/">>, <<"q=cowboy">>} = parse_fullpath(<<"/?q=cowboy">>),
	{<<"/path/to/resource">>, <<"q=cowboy">>}
		= parse_fullpath(<<"/path/to/resource?q=cowboy">>),
	ok.
-endif.

%% @doc Convert an HTTP version to atom.
%% Only HTTP/1.0 and HTTP/1.1 are supported; anything else crashes.

-spec parse_version(binary()) -> version().
parse_version(<<"HTTP/1.1">>) -> 'HTTP/1.1';
parse_version(<<"HTTP/1.0">>) -> 'HTTP/1.0'.

-ifdef(TEST).
parse_version_test() ->
	'HTTP/1.1' = parse_version(<<"HTTP/1.1">>),
	'HTTP/1.0' = parse_version(<<"HTTP/1.0">>),
	{'EXIT', _} = (catch parse_version(<<"HTTP/1.2">>)),
	ok.
-endif.

%% @doc Return formatted request-line and headers.
%% The result is an iolist; no copying of the input binaries is done.
%% @todo Add tests when the corresponding reverse functions are added.

-spec request(binary(), iodata(), version(), headers()) -> iodata().
request(Method, Path, Version, Headers) ->
	[Method, <<" ">>, Path, <<" ">>, version(Version), <<"\r\n">>,
		[[N, <<": ">>, V, <<"\r\n">>] || {N, V} <- Headers],
		<<"\r\n">>].

%% @doc Return the version as a binary.

-spec version(version()) -> binary().
version('HTTP/1.1') -> <<"HTTP/1.1">>;
version('HTTP/1.0') -> <<"HTTP/1.0">>.

-ifdef(TEST).
version_test() ->
	<<"HTTP/1.1">> = version('HTTP/1.1'),
	<<"HTTP/1.0">> = version('HTTP/1.0'),
	{'EXIT', _} = (catch version('HTTP/1.2')),
	ok.
-endif.
diff --git a/rabbitmq-server/deps/cowlib/src/cow_http_hd.erl b/rabbitmq-server/deps/cowlib/src/cow_http_hd.erl
new file mode 100644
index 0000000..35cf2f4
--- /dev/null
+++ b/rabbitmq-server/deps/cowlib/src/cow_http_hd.erl
@@ -0,0 +1,194 @@
%% Copyright (c) 2014, Loïc Hoguin
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

%% Parsers for individual HTTP header field values.
-module(cow_http_hd).

-export([parse_connection/1]).
-export([parse_content_length/1]).
-export([parse_transfer_encoding/1]).

-include("cow_inline.hrl").

%% @doc Parse the Connection header.
%%
%% Tokens are case insensitive and returned lowercased. The two common
%% single-token values are matched as literals to avoid the generic
%% list parse.

-spec parse_connection(binary()) -> [binary()].
parse_connection(<<"close">>) ->
	[<<"close">>];
parse_connection(<<"keep-alive">>) ->
	[<<"keep-alive">>];
parse_connection(Connection) ->
	nonempty(token_ci_list(Connection, [])).

-ifdef(TEST).
parse_connection_test_() ->
	Tests = [
		{<<"close">>, [<<"close">>]},
		{<<"ClOsE">>, [<<"close">>]},
		{<<"Keep-Alive">>, [<<"keep-alive">>]},
		{<<"keep-alive, Upgrade">>, [<<"keep-alive">>, <<"upgrade">>]}
	],
	[{V, fun() -> R = parse_connection(V) end} || {V, R} <- Tests].
-endif.

-ifdef(PERF).
horse_parse_connection_close() ->
	horse:repeat(200000,
		parse_connection(<<"close">>)
	).

horse_parse_connection_keepalive() ->
	horse:repeat(200000,
		parse_connection(<<"keep-alive">>)
	).

horse_parse_connection_keepalive_upgrade() ->
	horse:repeat(200000,
		parse_connection(<<"keep-alive, upgrade">>)
	).
-endif.

%% @doc Parse the Content-Length header.
%%
%% The value has at least one digit, and may be followed by whitespace.
%% The first digit is dispatched on directly so that an empty or
%% non-numeric value fails to match immediately.

-spec parse_content_length(binary()) -> non_neg_integer().
parse_content_length(<< $0 >>) -> 0;
parse_content_length(<< $0, R/bits >>) -> number(R, 0);
parse_content_length(<< $1, R/bits >>) -> number(R, 1);
parse_content_length(<< $2, R/bits >>) -> number(R, 2);
parse_content_length(<< $3, R/bits >>) -> number(R, 3);
parse_content_length(<< $4, R/bits >>) -> number(R, 4);
parse_content_length(<< $5, R/bits >>) -> number(R, 5);
parse_content_length(<< $6, R/bits >>) -> number(R, 6);
parse_content_length(<< $7, R/bits >>) -> number(R, 7);
parse_content_length(<< $8, R/bits >>) -> number(R, 8);
parse_content_length(<< $9, R/bits >>) -> number(R, 9).

-ifdef(TEST).
parse_content_length_test_() ->
	Tests = [
		{<<"0">>, 0},
		{<<"42 ">>, 42},
		{<<"69\t">>, 69},
		{<<"1337">>, 1337},
		{<<"1234567890">>, 1234567890},
		{<<"1234567890 ">>, 1234567890}
	],
	[{V, fun() -> R = parse_content_length(V) end} || {V, R} <- Tests].
-endif.

-ifdef(PERF).
horse_parse_content_length_zero() ->
	horse:repeat(100000,
		parse_content_length(<<"0">>)
	).

horse_parse_content_length_giga() ->
	horse:repeat(100000,
		parse_content_length(<<"1234567890">>)
	).
-endif.

%% @doc Parse the Transfer-Encoding header.
%%
%% Codings are case insensitive and returned lowercased; the common
%% "chunked" value is matched as a literal.
%%
%% @todo Extension parameters.

-spec parse_transfer_encoding(binary()) -> [binary()].
parse_transfer_encoding(<<"chunked">>) ->
	[<<"chunked">>];
parse_transfer_encoding(TransferEncoding) ->
	nonempty(token_ci_list(TransferEncoding, [])).

-ifdef(TEST).
parse_transfer_encoding_test_() ->
	Tests = [
		{<<"a , , , ">>, [<<"a">>]},
		{<<" , , , a">>, [<<"a">>]},
		{<<"a , , b">>, [<<"a">>, <<"b">>]},
		{<<"chunked">>, [<<"chunked">>]},
		{<<"chunked, something">>, [<<"chunked">>, <<"something">>]}
	],
	[{V, fun() -> R = parse_transfer_encoding(V) end} || {V, R} <- Tests].

parse_transfer_encoding_error_test_() ->
	Tests = [
		<<>>,
		<<" ">>,
		<<" , ">>,
		<<",,,">>,
		<<"a b">>
	],
	[{V, fun() -> {'EXIT', _} = (catch parse_transfer_encoding(V)) end}
		|| V <- Tests].
-endif.

-ifdef(PERF).
horse_parse_transfer_encoding_chunked() ->
	horse:repeat(200000,
		parse_transfer_encoding(<<"chunked">>)
	).

horse_parse_transfer_encoding_custom() ->
	horse:repeat(200000,
		parse_transfer_encoding(<<"chunked, something">>)
	).
-endif.

%% Internal.

%% Only return if the list is not empty.
nonempty(L) when L =/= [] -> L.

%% Parse a number optionally followed by whitespace.
%% Accumulate decimal digits into Acc; trailing SP/HTAB is allowed but
%% must be the only thing left (enforced by ws_end/1).
number(<< $0, R/bits >>, Acc) -> number(R, Acc * 10);
number(<< $1, R/bits >>, Acc) -> number(R, Acc * 10 + 1);
number(<< $2, R/bits >>, Acc) -> number(R, Acc * 10 + 2);
number(<< $3, R/bits >>, Acc) -> number(R, Acc * 10 + 3);
number(<< $4, R/bits >>, Acc) -> number(R, Acc * 10 + 4);
number(<< $5, R/bits >>, Acc) -> number(R, Acc * 10 + 5);
number(<< $6, R/bits >>, Acc) -> number(R, Acc * 10 + 6);
number(<< $7, R/bits >>, Acc) -> number(R, Acc * 10 + 7);
number(<< $8, R/bits >>, Acc) -> number(R, Acc * 10 + 8);
number(<< $9, R/bits >>, Acc) -> number(R, Acc * 10 + 9);
number(<< $\s, R/bits >>, Acc) -> ws_end(R), Acc;
number(<< $\t, R/bits >>, Acc) -> ws_end(R), Acc;
number(<<>>, Acc) -> Acc.

%% Assert that only whitespace remains; crashes on any other byte.
ws_end(<< $\s, R/bits >>) -> ws_end(R);
ws_end(<< $\t, R/bits >>) -> ws_end(R);
ws_end(<<>>) -> ok.

%% Parse a list of case insensitive tokens.
%% Leading whitespace and empty list elements are skipped.
token_ci_list(<<>>, Acc) -> lists:reverse(Acc);
token_ci_list(<< $\s, R/bits >>, Acc) -> token_ci_list(R, Acc);
token_ci_list(<< $\t, R/bits >>, Acc) -> token_ci_list(R, Acc);
token_ci_list(<< $,, R/bits >>, Acc) -> token_ci_list(R, Acc);
token_ci_list(<< C, R/bits >>, Acc) ->
	case C of
		?INLINE_LOWERCASE(token_ci_list, R, Acc, <<>>)
	end.

%% Accumulate one lowercased token T; whitespace after a token only
%% permits a separator or end of input next (see token_ci_list_sep/3).
token_ci_list(<<>>, Acc, T) -> lists:reverse([T|Acc]);
token_ci_list(<< $\s, R/bits >>, Acc, T) -> token_ci_list_sep(R, Acc, T);
token_ci_list(<< $\t, R/bits >>, Acc, T) -> token_ci_list_sep(R, Acc, T);
token_ci_list(<< $,, R/bits >>, Acc, T) -> token_ci_list(R, [T|Acc]);
token_ci_list(<< C, R/bits >>, Acc, T) ->
	case C of
		?INLINE_LOWERCASE(token_ci_list, R, Acc, T)
	end.

token_ci_list_sep(<<>>, Acc, T) -> lists:reverse([T|Acc]);
token_ci_list_sep(<< $\s, R/bits >>, Acc, T) -> token_ci_list_sep(R, Acc, T);
token_ci_list_sep(<< $\t, R/bits >>, Acc, T) -> token_ci_list_sep(R, Acc, T);
token_ci_list_sep(<< $,, R/bits >>, Acc, T) -> token_ci_list(R, [T|Acc]).
diff --git a/rabbitmq-server/deps/cowlib/src/cow_http_te.erl b/rabbitmq-server/deps/cowlib/src/cow_http_te.erl
new file mode 100644
index 0000000..5ab71f4
--- /dev/null
+++ b/rabbitmq-server/deps/cowlib/src/cow_http_te.erl
@@ -0,0 +1,327 @@
%% Copyright (c) 2014, Loïc Hoguin
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

%% Incremental decoding/encoding of HTTP transfer encodings
%% (identity and chunked).
-module(cow_http_te).

%% Identity.
-export([stream_identity/2]).
-export([identity/1]).

%% Chunked.
-export([stream_chunked/2]).
-export([chunk/1]).
-export([last_chunk/0]).

%% The state type is the same for both identity and chunked.
-type state() :: {non_neg_integer(), non_neg_integer()}.

-type decode_ret() :: more
	| {more, Data::binary(), state()}
	| {more, Data::binary(), RemLen::non_neg_integer(), state()}
	| {more, Data::binary(), Rest::binary(), state()}
	| {done, TotalLen::non_neg_integer(), Rest::binary()}
	| {done, Data::binary(), TotalLen::non_neg_integer(), Rest::binary()}.
-export_type([decode_ret/0]).

-ifdef(EXTRA).
%% Test helper: feed a decoder fun one byte at a time, verifying that
%% every intermediate return shape is handled.
dripfeed(<< C, Rest/bits >>, Acc, State, F) ->
	case F(<< Acc/binary, C >>, State) of
		more ->
			dripfeed(Rest, << Acc/binary, C >>, State, F);
		{more, _, State2} ->
			dripfeed(Rest, <<>>, State2, F);
		{more, _, Length, State2} when is_integer(Length) ->
			dripfeed(Rest, <<>>, State2, F);
		{more, _, Acc2, State2} ->
			dripfeed(Rest, Acc2, State2, F);
		{done, _, <<>>} ->
			ok;
		{done, _, _, <<>>} ->
			ok
	end.
-endif.

%% Identity.

%% @doc Decode an identity stream.
%% State is {BytesStreamedSoFar, TotalExpectedLength}.

-spec stream_identity(Data, State)
	-> {more, Data, Len, State} | {done, Data, Len, Data}
	when Data::binary(), State::state(), Len::non_neg_integer().
stream_identity(Data, {Streamed, Total}) ->
	Streamed2 = Streamed + byte_size(Data),
	if
		Streamed2 < Total ->
			{more, Data, Total - Streamed2, {Streamed2, Total}};
		true ->
			Size = Total - Streamed,
			<< Data2:Size/binary, Rest/bits >> = Data,
			{done, Data2, Total, Rest}
	end.

-spec identity(Data) -> Data when Data::iodata().
identity(Data) ->
	Data.

-ifdef(TEST).
stream_identity_test() ->
	{done, <<>>, 0, <<>>}
		= stream_identity(identity(<<>>), {0, 0}),
	{done, <<"\r\n">>, 2, <<>>}
		= stream_identity(identity(<<"\r\n">>), {0, 2}),
	{done, << 0:80000 >>, 10000, <<>>}
		= stream_identity(identity(<< 0:80000 >>), {0, 10000}),
	ok.

stream_identity_parts_test() ->
	{more, << 0:8000 >>, 1999, S1}
		= stream_identity(<< 0:8000 >>, {0, 2999}),
	{more, << 0:8000 >>, 999, S2}
		= stream_identity(<< 0:8000 >>, S1),
	{done, << 0:7992 >>, 2999, <<>>}
		= stream_identity(<< 0:7992 >>, S2),
	ok.
-endif.

-ifdef(PERF).
%% Using the same data as the chunked one for comparison.

horse_stream_identity() ->
	horse:repeat(10000,
		stream_identity(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, {0, 43})
	).

horse_stream_identity_dripfeed() ->
	horse:repeat(10000,
		dripfeed(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, <<>>, {0, 43}, fun stream_identity/2)
	).
-endif.

%% Chunked.

%% @doc Decode a chunked stream.
%%
%% State is {RemainingBytesInCurrentChunk, TotalBytesStreamedSoFar}.
%% Rem includes the chunk's trailing CRLF, so Rem = 2 means only the
%% CRLF is left and Rem = 1 means only the LF is left.

-spec stream_chunked(Data, State)
	-> more | {more, Data, State} | {more, Data, Len, State}
	| {more, Data, Data, State}
	| {done, Len, Data} | {done, Data, Len, Data}
	when Data::binary(), State::state(), Len::non_neg_integer().
stream_chunked(Data, State) ->
	stream_chunked(Data, State, <<>>).

%% New chunk.
stream_chunked(Data = << C, _/bits >>, {0, Streamed}, Acc) when C =/= $\r ->
	case chunked_len(Data, Streamed, Acc, 0) of
		{next, Rest, State, Acc2} ->
			stream_chunked(Rest, State, Acc2);
		{more, State, Acc2} ->
			{more, Acc2, Data, State};
		Ret ->
			Ret
	end;
%% Trailing \r\n before next chunk.
stream_chunked(<< "\r\n", Rest/bits >>, {2, Streamed}, Acc) ->
	stream_chunked(Rest, {0, Streamed}, Acc);
%% Trailing \r before next chunk.
stream_chunked(<< "\r" >>, {2, Streamed}, Acc) ->
	{more, Acc, {1, Streamed}};
%% Trailing \n before next chunk.
stream_chunked(<< "\n", Rest/bits >>, {1, Streamed}, Acc) ->
	stream_chunked(Rest, {0, Streamed}, Acc);
%% More data needed.
stream_chunked(<<>>, State = {Rem, _}, Acc) ->
	{more, Acc, Rem, State};
%% Chunk data.
stream_chunked(Data, {Rem, Streamed}, Acc) when Rem > 2 ->
	DataSize = byte_size(Data),
	RemSize = Rem - 2,
	case Data of
		<< Chunk:RemSize/binary, "\r\n", Rest/bits >> ->
			stream_chunked(Rest, {0, Streamed + RemSize}, << Acc/binary, Chunk/binary >>);
		<< Chunk:RemSize/binary, "\r" >> ->
			{more, << Acc/binary, Chunk/binary >>, {1, Streamed + RemSize}};
		%% Everything in Data is part of the chunk.
		_ ->
			Rem2 = Rem - DataSize,
			{more, << Acc/binary, Data/binary >>, Rem2, {Rem2, Streamed + DataSize}}
	end.

%% Parse a hexadecimal chunk-size line, accumulating the value in Len.
chunked_len(<< $0, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16);
chunked_len(<< $1, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 1);
chunked_len(<< $2, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 2);
chunked_len(<< $3, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 3);
chunked_len(<< $4, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 4);
chunked_len(<< $5, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 5);
chunked_len(<< $6, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 6);
chunked_len(<< $7, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 7);
chunked_len(<< $8, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 8);
chunked_len(<< $9, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 9);
chunked_len(<< $A, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
chunked_len(<< $B, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
chunked_len(<< $C, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
chunked_len(<< $D, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
chunked_len(<< $E, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
chunked_len(<< $F, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
chunked_len(<< $a, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
chunked_len(<< $b, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
chunked_len(<< $c, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
chunked_len(<< $d, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
chunked_len(<< $e, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
chunked_len(<< $f, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
%% Final chunk.
chunked_len(<< "\r\n\r\n", R/bits >>, S, <<>>, 0) -> {done, S, R};
chunked_len(<< "\r\n\r\n", R/bits >>, S, A, 0) -> {done, A, S, R};
chunked_len(_, _, _, 0) -> more;
%% Normal chunk. Add 2 to Len for the trailing \r\n.
chunked_len(<< "\r\n", R/bits >>, S, A, Len) -> {next, R, {Len + 2, S}, A};
chunked_len(<<"\r">>, _, <<>>, _) -> more;
chunked_len(<<"\r">>, S, A, _) -> {more, {0, S}, A};
chunked_len(<<>>, _, <<>>, _) -> more;
chunked_len(<<>>, S, A, _) -> {more, {0, S}, A}.

%% @doc Encode a chunk.

-spec chunk(D) -> D when D::iodata().
chunk(Data) ->
	[integer_to_list(iolist_size(Data), 16), <<"\r\n">>,
		Data, <<"\r\n">>].

%% @doc Encode the last chunk of a chunked stream.

-spec last_chunk() -> << _:40 >>.
last_chunk() ->
	<<"0\r\n\r\n">>.

-ifdef(TEST).
stream_chunked_identity_test() ->
	{done, <<"Wikipedia in\r\n\r\nchunks.">>, 23, <<>>}
		= stream_chunked(iolist_to_binary([
			chunk("Wiki"),
			chunk("pedia"),
			chunk(" in\r\n\r\nchunks."),
			last_chunk()
		]), {0, 0}),
	ok.

stream_chunked_one_pass_test() ->
	{done, 0, <<>>} = stream_chunked(<<"0\r\n\r\n">>, {0, 0}),
	{done, <<"Wikipedia in\r\n\r\nchunks.">>, 23, <<>>}
		= stream_chunked(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, {0, 0}),
	ok.

stream_chunked_n_passes_test() ->
	S0 = {0, 0},
	more = stream_chunked(<<"4\r">>, S0),
	{more, <<>>, 6, S1} = stream_chunked(<<"4\r\n">>, S0),
	{more, <<"Wiki">>, 0, S2} = stream_chunked(<<"Wiki\r\n">>, S1),
	{more, <<"pedia">>, <<"e\r">>, S3} = stream_chunked(<<"5\r\npedia\r\ne\r">>, S2),
	{more, <<" in\r\n\r\nchunks.">>, 2, S4} = stream_chunked(<<"e\r\n in\r\n\r\nchunks.">>, S3),
	{done, 23, <<>>} = stream_chunked(<<"\r\n0\r\n\r\n">>, S4),
	%% A few extra for coverage purposes.
	more = stream_chunked(<<"\n3">>, {1, 0}),
	{more, <<"abc">>, 2, {2, 3}} = stream_chunked(<<"\n3\r\nabc">>, {1, 0}),
	{more, <<"abc">>, {1, 3}} = stream_chunked(<<"3\r\nabc\r">>, {0, 0}),
	{more, <<"abc">>, <<"123">>, {0, 3}} = stream_chunked(<<"3\r\nabc\r\n123">>, {0, 0}),
	ok.

stream_chunked_dripfeed_test() ->
	dripfeed(<<
		"4\r\n"
		"Wiki\r\n"
		"5\r\n"
		"pedia\r\n"
		"e\r\n"
		" in\r\n\r\nchunks.\r\n"
		"0\r\n"
		"\r\n">>, <<>>, {0, 0}, fun stream_chunked/2).

%% Test helper: split Body into chunked encoding with chunks of at
%% most ChunkSize bytes, terminated by the last-chunk marker.
do_body_to_chunks(_, <<>>, Acc) ->
	lists:reverse([<<"0\r\n\r\n">>|Acc]);
do_body_to_chunks(ChunkSize, Body, Acc) ->
	BodySize = byte_size(Body),
	ChunkSize2 = case BodySize < ChunkSize of
		true -> BodySize;
		false -> ChunkSize
	end,
	<< Chunk:ChunkSize2/binary, Rest/binary >> = Body,
	ChunkSizeBin = list_to_binary(integer_to_list(ChunkSize2, 16)),
	do_body_to_chunks(ChunkSize, Rest,
		[<< ChunkSizeBin/binary, "\r\n", Chunk/binary, "\r\n" >>|Acc]).

stream_chunked_dripfeed2_test() ->
	Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])),
	Body2 = iolist_to_binary(do_body_to_chunks(50, Body, [])),
	dripfeed(Body2, <<>>, {0, 0}, fun stream_chunked/2).

stream_chunked_error_test_() ->
	Tests = [
		{<<>>, undefined},
		{<<"\n\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, {2, 0}}
	],
	[{lists:flatten(io_lib:format("value ~p state ~p", [V, S])),
		fun() -> {'EXIT', _} = (catch stream_chunked(V, S)) end}
			|| {V, S} <- Tests].
-endif.

-ifdef(PERF).
horse_stream_chunked() ->
	horse:repeat(10000,
		stream_chunked(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, {0, 0})
	).

horse_stream_chunked_dripfeed() ->
	horse:repeat(10000,
		dripfeed(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, <<>>, {0, 43}, fun stream_chunked/2)
	).
-endif.
diff --git a/rabbitmq-server/deps/cowlib/src/cow_mimetypes.erl b/rabbitmq-server/deps/cowlib/src/cow_mimetypes.erl
new file mode 100644
index 0000000..69284e1
--- /dev/null
+++ b/rabbitmq-server/deps/cowlib/src/cow_mimetypes.erl
@@ -0,0 +1,1041 @@
%% Copyright (c) 2013-2014, Loïc Hoguin
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

%% File-extension to MIME type lookup tables.
-module(cow_mimetypes).

-export([all/1]).
-export([web/1]).

%% @doc Return the mimetype for any file by looking at its extension.
%% Paths without an extension map to application/octet-stream.

-spec all(binary()) -> {binary(), binary(), []}.
all(Path) ->
	case filename:extension(Path) of
		<<>> -> {<<"application">>, <<"octet-stream">>, []};
		<< $., Ext/binary >> -> all_ext(Ext)
	end.

%% @doc Return the mimetype for a Web related file by looking at its extension.

-spec web(binary()) -> {binary(), binary(), []}.
web(Path) ->
	case filename:extension(Path) of
		<<>> -> {<<"application">>, <<"octet-stream">>, []};
		<< $., Ext/binary >> -> web_ext(Ext)
	end.

%% Internal.
+ +%% GENERATED +all_ext(<<"123">>) -> {<<"application">>, <<"vnd.lotus-1-2-3">>, []}; +all_ext(<<"3dml">>) -> {<<"text">>, <<"vnd.in3d.3dml">>, []}; +all_ext(<<"3ds">>) -> {<<"image">>, <<"x-3ds">>, []}; +all_ext(<<"3g2">>) -> {<<"video">>, <<"3gpp2">>, []}; +all_ext(<<"3gp">>) -> {<<"video">>, <<"3gpp">>, []}; +all_ext(<<"7z">>) -> {<<"application">>, <<"x-7z-compressed">>, []}; +all_ext(<<"aab">>) -> {<<"application">>, <<"x-authorware-bin">>, []}; +all_ext(<<"aac">>) -> {<<"audio">>, <<"x-aac">>, []}; +all_ext(<<"aam">>) -> {<<"application">>, <<"x-authorware-map">>, []}; +all_ext(<<"aas">>) -> {<<"application">>, <<"x-authorware-seg">>, []}; +all_ext(<<"abw">>) -> {<<"application">>, <<"x-abiword">>, []}; +all_ext(<<"ac">>) -> {<<"application">>, <<"pkix-attr-cert">>, []}; +all_ext(<<"acc">>) -> {<<"application">>, <<"vnd.americandynamics.acc">>, []}; +all_ext(<<"ace">>) -> {<<"application">>, <<"x-ace-compressed">>, []}; +all_ext(<<"acu">>) -> {<<"application">>, <<"vnd.acucobol">>, []}; +all_ext(<<"acutc">>) -> {<<"application">>, <<"vnd.acucorp">>, []}; +all_ext(<<"adp">>) -> {<<"audio">>, <<"adpcm">>, []}; +all_ext(<<"aep">>) -> {<<"application">>, <<"vnd.audiograph">>, []}; +all_ext(<<"afm">>) -> {<<"application">>, <<"x-font-type1">>, []}; +all_ext(<<"afp">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []}; +all_ext(<<"ahead">>) -> {<<"application">>, <<"vnd.ahead.space">>, []}; +all_ext(<<"ai">>) -> {<<"application">>, <<"postscript">>, []}; +all_ext(<<"aif">>) -> {<<"audio">>, <<"x-aiff">>, []}; +all_ext(<<"aifc">>) -> {<<"audio">>, <<"x-aiff">>, []}; +all_ext(<<"aiff">>) -> {<<"audio">>, <<"x-aiff">>, []}; +all_ext(<<"air">>) -> {<<"application">>, <<"vnd.adobe.air-application-installer-package+zip">>, []}; +all_ext(<<"ait">>) -> {<<"application">>, <<"vnd.dvb.ait">>, []}; +all_ext(<<"ami">>) -> {<<"application">>, <<"vnd.amiga.ami">>, []}; +all_ext(<<"apk">>) -> {<<"application">>, <<"vnd.android.package-archive">>, []}; +all_ext(<<"appcache">>) -> 
{<<"text">>, <<"cache-manifest">>, []}; +all_ext(<<"application">>) -> {<<"application">>, <<"x-ms-application">>, []}; +all_ext(<<"apr">>) -> {<<"application">>, <<"vnd.lotus-approach">>, []}; +all_ext(<<"arc">>) -> {<<"application">>, <<"x-freearc">>, []}; +all_ext(<<"asc">>) -> {<<"application">>, <<"pgp-signature">>, []}; +all_ext(<<"asf">>) -> {<<"video">>, <<"x-ms-asf">>, []}; +all_ext(<<"asm">>) -> {<<"text">>, <<"x-asm">>, []}; +all_ext(<<"aso">>) -> {<<"application">>, <<"vnd.accpac.simply.aso">>, []}; +all_ext(<<"asx">>) -> {<<"video">>, <<"x-ms-asf">>, []}; +all_ext(<<"atc">>) -> {<<"application">>, <<"vnd.acucorp">>, []}; +all_ext(<<"atom">>) -> {<<"application">>, <<"atom+xml">>, []}; +all_ext(<<"atomcat">>) -> {<<"application">>, <<"atomcat+xml">>, []}; +all_ext(<<"atomsvc">>) -> {<<"application">>, <<"atomsvc+xml">>, []}; +all_ext(<<"atx">>) -> {<<"application">>, <<"vnd.antix.game-component">>, []}; +all_ext(<<"au">>) -> {<<"audio">>, <<"basic">>, []}; +all_ext(<<"avi">>) -> {<<"video">>, <<"x-msvideo">>, []}; +all_ext(<<"aw">>) -> {<<"application">>, <<"applixware">>, []}; +all_ext(<<"azf">>) -> {<<"application">>, <<"vnd.airzip.filesecure.azf">>, []}; +all_ext(<<"azs">>) -> {<<"application">>, <<"vnd.airzip.filesecure.azs">>, []}; +all_ext(<<"azw">>) -> {<<"application">>, <<"vnd.amazon.ebook">>, []}; +all_ext(<<"bat">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"bcpio">>) -> {<<"application">>, <<"x-bcpio">>, []}; +all_ext(<<"bdf">>) -> {<<"application">>, <<"x-font-bdf">>, []}; +all_ext(<<"bdm">>) -> {<<"application">>, <<"vnd.syncml.dm+wbxml">>, []}; +all_ext(<<"bed">>) -> {<<"application">>, <<"vnd.realvnc.bed">>, []}; +all_ext(<<"bh2">>) -> {<<"application">>, <<"vnd.fujitsu.oasysprs">>, []}; +all_ext(<<"bin">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"blb">>) -> {<<"application">>, <<"x-blorb">>, []}; +all_ext(<<"blorb">>) -> {<<"application">>, <<"x-blorb">>, []}; +all_ext(<<"bmi">>) -> 
{<<"application">>, <<"vnd.bmi">>, []}; +all_ext(<<"bmp">>) -> {<<"image">>, <<"bmp">>, []}; +all_ext(<<"book">>) -> {<<"application">>, <<"vnd.framemaker">>, []}; +all_ext(<<"box">>) -> {<<"application">>, <<"vnd.previewsystems.box">>, []}; +all_ext(<<"boz">>) -> {<<"application">>, <<"x-bzip2">>, []}; +all_ext(<<"bpk">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"btif">>) -> {<<"image">>, <<"prs.btif">>, []}; +all_ext(<<"bz2">>) -> {<<"application">>, <<"x-bzip2">>, []}; +all_ext(<<"bz">>) -> {<<"application">>, <<"x-bzip">>, []}; +all_ext(<<"c11amc">>) -> {<<"application">>, <<"vnd.cluetrust.cartomobile-config">>, []}; +all_ext(<<"c11amz">>) -> {<<"application">>, <<"vnd.cluetrust.cartomobile-config-pkg">>, []}; +all_ext(<<"c4d">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"c4f">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"c4g">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"c4p">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"c4u">>) -> {<<"application">>, <<"vnd.clonk.c4group">>, []}; +all_ext(<<"cab">>) -> {<<"application">>, <<"vnd.ms-cab-compressed">>, []}; +all_ext(<<"caf">>) -> {<<"audio">>, <<"x-caf">>, []}; +all_ext(<<"cap">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []}; +all_ext(<<"car">>) -> {<<"application">>, <<"vnd.curl.car">>, []}; +all_ext(<<"cat">>) -> {<<"application">>, <<"vnd.ms-pki.seccat">>, []}; +all_ext(<<"cb7">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cba">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cbr">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cbt">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cbz">>) -> {<<"application">>, <<"x-cbr">>, []}; +all_ext(<<"cct">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"cc">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"ccxml">>) -> {<<"application">>, <<"ccxml+xml">>, []}; +all_ext(<<"cdbcmsg">>) -> {<<"application">>, 
<<"vnd.contact.cmsg">>, []}; +all_ext(<<"cdf">>) -> {<<"application">>, <<"x-netcdf">>, []}; +all_ext(<<"cdkey">>) -> {<<"application">>, <<"vnd.mediastation.cdkey">>, []}; +all_ext(<<"cdmia">>) -> {<<"application">>, <<"cdmi-capability">>, []}; +all_ext(<<"cdmic">>) -> {<<"application">>, <<"cdmi-container">>, []}; +all_ext(<<"cdmid">>) -> {<<"application">>, <<"cdmi-domain">>, []}; +all_ext(<<"cdmio">>) -> {<<"application">>, <<"cdmi-object">>, []}; +all_ext(<<"cdmiq">>) -> {<<"application">>, <<"cdmi-queue">>, []}; +all_ext(<<"cdx">>) -> {<<"chemical">>, <<"x-cdx">>, []}; +all_ext(<<"cdxml">>) -> {<<"application">>, <<"vnd.chemdraw+xml">>, []}; +all_ext(<<"cdy">>) -> {<<"application">>, <<"vnd.cinderella">>, []}; +all_ext(<<"cer">>) -> {<<"application">>, <<"pkix-cert">>, []}; +all_ext(<<"cfs">>) -> {<<"application">>, <<"x-cfs-compressed">>, []}; +all_ext(<<"cgm">>) -> {<<"image">>, <<"cgm">>, []}; +all_ext(<<"chat">>) -> {<<"application">>, <<"x-chat">>, []}; +all_ext(<<"chm">>) -> {<<"application">>, <<"vnd.ms-htmlhelp">>, []}; +all_ext(<<"chrt">>) -> {<<"application">>, <<"vnd.kde.kchart">>, []}; +all_ext(<<"cif">>) -> {<<"chemical">>, <<"x-cif">>, []}; +all_ext(<<"cii">>) -> {<<"application">>, <<"vnd.anser-web-certificate-issue-initiation">>, []}; +all_ext(<<"cil">>) -> {<<"application">>, <<"vnd.ms-artgalry">>, []}; +all_ext(<<"cla">>) -> {<<"application">>, <<"vnd.claymore">>, []}; +all_ext(<<"class">>) -> {<<"application">>, <<"java-vm">>, []}; +all_ext(<<"clkk">>) -> {<<"application">>, <<"vnd.crick.clicker.keyboard">>, []}; +all_ext(<<"clkp">>) -> {<<"application">>, <<"vnd.crick.clicker.palette">>, []}; +all_ext(<<"clkt">>) -> {<<"application">>, <<"vnd.crick.clicker.template">>, []}; +all_ext(<<"clkw">>) -> {<<"application">>, <<"vnd.crick.clicker.wordbank">>, []}; +all_ext(<<"clkx">>) -> {<<"application">>, <<"vnd.crick.clicker">>, []}; +all_ext(<<"clp">>) -> {<<"application">>, <<"x-msclip">>, []}; +all_ext(<<"cmc">>) -> {<<"application">>, 
<<"vnd.cosmocaller">>, []}; +all_ext(<<"cmdf">>) -> {<<"chemical">>, <<"x-cmdf">>, []}; +all_ext(<<"cml">>) -> {<<"chemical">>, <<"x-cml">>, []}; +all_ext(<<"cmp">>) -> {<<"application">>, <<"vnd.yellowriver-custom-menu">>, []}; +all_ext(<<"cmx">>) -> {<<"image">>, <<"x-cmx">>, []}; +all_ext(<<"cod">>) -> {<<"application">>, <<"vnd.rim.cod">>, []}; +all_ext(<<"com">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"conf">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"cpio">>) -> {<<"application">>, <<"x-cpio">>, []}; +all_ext(<<"cpp">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"cpt">>) -> {<<"application">>, <<"mac-compactpro">>, []}; +all_ext(<<"crd">>) -> {<<"application">>, <<"x-mscardfile">>, []}; +all_ext(<<"crl">>) -> {<<"application">>, <<"pkix-crl">>, []}; +all_ext(<<"crt">>) -> {<<"application">>, <<"x-x509-ca-cert">>, []}; +all_ext(<<"cryptonote">>) -> {<<"application">>, <<"vnd.rig.cryptonote">>, []}; +all_ext(<<"csh">>) -> {<<"application">>, <<"x-csh">>, []}; +all_ext(<<"csml">>) -> {<<"chemical">>, <<"x-csml">>, []}; +all_ext(<<"csp">>) -> {<<"application">>, <<"vnd.commonspace">>, []}; +all_ext(<<"css">>) -> {<<"text">>, <<"css">>, []}; +all_ext(<<"cst">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"csv">>) -> {<<"text">>, <<"csv">>, []}; +all_ext(<<"c">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"cu">>) -> {<<"application">>, <<"cu-seeme">>, []}; +all_ext(<<"curl">>) -> {<<"text">>, <<"vnd.curl">>, []}; +all_ext(<<"cww">>) -> {<<"application">>, <<"prs.cww">>, []}; +all_ext(<<"cxt">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"cxx">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"dae">>) -> {<<"model">>, <<"vnd.collada+xml">>, []}; +all_ext(<<"daf">>) -> {<<"application">>, <<"vnd.mobius.daf">>, []}; +all_ext(<<"dart">>) -> {<<"application">>, <<"vnd.dart">>, []}; +all_ext(<<"dataless">>) -> {<<"application">>, <<"vnd.fdsn.seed">>, []}; +all_ext(<<"davmount">>) -> {<<"application">>, <<"davmount+xml">>, 
[]}; +all_ext(<<"dbk">>) -> {<<"application">>, <<"docbook+xml">>, []}; +all_ext(<<"dcr">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"dcurl">>) -> {<<"text">>, <<"vnd.curl.dcurl">>, []}; +all_ext(<<"dd2">>) -> {<<"application">>, <<"vnd.oma.dd2+xml">>, []}; +all_ext(<<"ddd">>) -> {<<"application">>, <<"vnd.fujixerox.ddd">>, []}; +all_ext(<<"deb">>) -> {<<"application">>, <<"x-debian-package">>, []}; +all_ext(<<"def">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"deploy">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"der">>) -> {<<"application">>, <<"x-x509-ca-cert">>, []}; +all_ext(<<"dfac">>) -> {<<"application">>, <<"vnd.dreamfactory">>, []}; +all_ext(<<"dgc">>) -> {<<"application">>, <<"x-dgc-compressed">>, []}; +all_ext(<<"dic">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"dir">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"dis">>) -> {<<"application">>, <<"vnd.mobius.dis">>, []}; +all_ext(<<"dist">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"distz">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"djv">>) -> {<<"image">>, <<"vnd.djvu">>, []}; +all_ext(<<"djvu">>) -> {<<"image">>, <<"vnd.djvu">>, []}; +all_ext(<<"dll">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"dmg">>) -> {<<"application">>, <<"x-apple-diskimage">>, []}; +all_ext(<<"dmp">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []}; +all_ext(<<"dms">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"dna">>) -> {<<"application">>, <<"vnd.dna">>, []}; +all_ext(<<"doc">>) -> {<<"application">>, <<"msword">>, []}; +all_ext(<<"docm">>) -> {<<"application">>, <<"vnd.ms-word.document.macroenabled.12">>, []}; +all_ext(<<"docx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.wordprocessingml.document">>, []}; +all_ext(<<"dot">>) -> {<<"application">>, <<"msword">>, []}; +all_ext(<<"dotm">>) -> {<<"application">>, <<"vnd.ms-word.template.macroenabled.12">>, []}; +all_ext(<<"dotx">>) -> 
{<<"application">>, <<"vnd.openxmlformats-officedocument.wordprocessingml.template">>, []}; +all_ext(<<"dp">>) -> {<<"application">>, <<"vnd.osgi.dp">>, []}; +all_ext(<<"dpg">>) -> {<<"application">>, <<"vnd.dpgraph">>, []}; +all_ext(<<"dra">>) -> {<<"audio">>, <<"vnd.dra">>, []}; +all_ext(<<"dsc">>) -> {<<"text">>, <<"prs.lines.tag">>, []}; +all_ext(<<"dssc">>) -> {<<"application">>, <<"dssc+der">>, []}; +all_ext(<<"dtb">>) -> {<<"application">>, <<"x-dtbook+xml">>, []}; +all_ext(<<"dtd">>) -> {<<"application">>, <<"xml-dtd">>, []}; +all_ext(<<"dts">>) -> {<<"audio">>, <<"vnd.dts">>, []}; +all_ext(<<"dtshd">>) -> {<<"audio">>, <<"vnd.dts.hd">>, []}; +all_ext(<<"dump">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"dvb">>) -> {<<"video">>, <<"vnd.dvb.file">>, []}; +all_ext(<<"dvi">>) -> {<<"application">>, <<"x-dvi">>, []}; +all_ext(<<"dwf">>) -> {<<"model">>, <<"vnd.dwf">>, []}; +all_ext(<<"dwg">>) -> {<<"image">>, <<"vnd.dwg">>, []}; +all_ext(<<"dxf">>) -> {<<"image">>, <<"vnd.dxf">>, []}; +all_ext(<<"dxp">>) -> {<<"application">>, <<"vnd.spotfire.dxp">>, []}; +all_ext(<<"dxr">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"ecelp4800">>) -> {<<"audio">>, <<"vnd.nuera.ecelp4800">>, []}; +all_ext(<<"ecelp7470">>) -> {<<"audio">>, <<"vnd.nuera.ecelp7470">>, []}; +all_ext(<<"ecelp9600">>) -> {<<"audio">>, <<"vnd.nuera.ecelp9600">>, []}; +all_ext(<<"ecma">>) -> {<<"application">>, <<"ecmascript">>, []}; +all_ext(<<"edm">>) -> {<<"application">>, <<"vnd.novadigm.edm">>, []}; +all_ext(<<"edx">>) -> {<<"application">>, <<"vnd.novadigm.edx">>, []}; +all_ext(<<"efif">>) -> {<<"application">>, <<"vnd.picsel">>, []}; +all_ext(<<"ei6">>) -> {<<"application">>, <<"vnd.pg.osasli">>, []}; +all_ext(<<"elc">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"emf">>) -> {<<"application">>, <<"x-msmetafile">>, []}; +all_ext(<<"eml">>) -> {<<"message">>, <<"rfc822">>, []}; +all_ext(<<"emma">>) -> {<<"application">>, <<"emma+xml">>, []}; 
+all_ext(<<"emz">>) -> {<<"application">>, <<"x-msmetafile">>, []}; +all_ext(<<"eol">>) -> {<<"audio">>, <<"vnd.digital-winds">>, []}; +all_ext(<<"eot">>) -> {<<"application">>, <<"vnd.ms-fontobject">>, []}; +all_ext(<<"eps">>) -> {<<"application">>, <<"postscript">>, []}; +all_ext(<<"epub">>) -> {<<"application">>, <<"epub+zip">>, []}; +all_ext(<<"es3">>) -> {<<"application">>, <<"vnd.eszigno3+xml">>, []}; +all_ext(<<"esa">>) -> {<<"application">>, <<"vnd.osgi.subsystem">>, []}; +all_ext(<<"esf">>) -> {<<"application">>, <<"vnd.epson.esf">>, []}; +all_ext(<<"et3">>) -> {<<"application">>, <<"vnd.eszigno3+xml">>, []}; +all_ext(<<"etx">>) -> {<<"text">>, <<"x-setext">>, []}; +all_ext(<<"eva">>) -> {<<"application">>, <<"x-eva">>, []}; +all_ext(<<"evy">>) -> {<<"application">>, <<"x-envoy">>, []}; +all_ext(<<"exe">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"exi">>) -> {<<"application">>, <<"exi">>, []}; +all_ext(<<"ext">>) -> {<<"application">>, <<"vnd.novadigm.ext">>, []}; +all_ext(<<"ez2">>) -> {<<"application">>, <<"vnd.ezpix-album">>, []}; +all_ext(<<"ez3">>) -> {<<"application">>, <<"vnd.ezpix-package">>, []}; +all_ext(<<"ez">>) -> {<<"application">>, <<"andrew-inset">>, []}; +all_ext(<<"f4v">>) -> {<<"video">>, <<"x-f4v">>, []}; +all_ext(<<"f77">>) -> {<<"text">>, <<"x-fortran">>, []}; +all_ext(<<"f90">>) -> {<<"text">>, <<"x-fortran">>, []}; +all_ext(<<"fbs">>) -> {<<"image">>, <<"vnd.fastbidsheet">>, []}; +all_ext(<<"fcdt">>) -> {<<"application">>, <<"vnd.adobe.formscentral.fcdt">>, []}; +all_ext(<<"fcs">>) -> {<<"application">>, <<"vnd.isac.fcs">>, []}; +all_ext(<<"fdf">>) -> {<<"application">>, <<"vnd.fdf">>, []}; +all_ext(<<"fe_launch">>) -> {<<"application">>, <<"vnd.denovo.fcselayout-link">>, []}; +all_ext(<<"fg5">>) -> {<<"application">>, <<"vnd.fujitsu.oasysgp">>, []}; +all_ext(<<"fgd">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"fh4">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fh5">>) -> {<<"image">>, 
<<"x-freehand">>, []}; +all_ext(<<"fh7">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fhc">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fh">>) -> {<<"image">>, <<"x-freehand">>, []}; +all_ext(<<"fig">>) -> {<<"application">>, <<"x-xfig">>, []}; +all_ext(<<"flac">>) -> {<<"audio">>, <<"x-flac">>, []}; +all_ext(<<"fli">>) -> {<<"video">>, <<"x-fli">>, []}; +all_ext(<<"flo">>) -> {<<"application">>, <<"vnd.micrografx.flo">>, []}; +all_ext(<<"flv">>) -> {<<"video">>, <<"x-flv">>, []}; +all_ext(<<"flw">>) -> {<<"application">>, <<"vnd.kde.kivio">>, []}; +all_ext(<<"flx">>) -> {<<"text">>, <<"vnd.fmi.flexstor">>, []}; +all_ext(<<"fly">>) -> {<<"text">>, <<"vnd.fly">>, []}; +all_ext(<<"fm">>) -> {<<"application">>, <<"vnd.framemaker">>, []}; +all_ext(<<"fnc">>) -> {<<"application">>, <<"vnd.frogans.fnc">>, []}; +all_ext(<<"for">>) -> {<<"text">>, <<"x-fortran">>, []}; +all_ext(<<"fpx">>) -> {<<"image">>, <<"vnd.fpx">>, []}; +all_ext(<<"frame">>) -> {<<"application">>, <<"vnd.framemaker">>, []}; +all_ext(<<"fsc">>) -> {<<"application">>, <<"vnd.fsc.weblaunch">>, []}; +all_ext(<<"fst">>) -> {<<"image">>, <<"vnd.fst">>, []}; +all_ext(<<"ftc">>) -> {<<"application">>, <<"vnd.fluxtime.clip">>, []}; +all_ext(<<"f">>) -> {<<"text">>, <<"x-fortran">>, []}; +all_ext(<<"fti">>) -> {<<"application">>, <<"vnd.anser-web-funds-transfer-initiation">>, []}; +all_ext(<<"fvt">>) -> {<<"video">>, <<"vnd.fvt">>, []}; +all_ext(<<"fxp">>) -> {<<"application">>, <<"vnd.adobe.fxp">>, []}; +all_ext(<<"fxpl">>) -> {<<"application">>, <<"vnd.adobe.fxp">>, []}; +all_ext(<<"fzs">>) -> {<<"application">>, <<"vnd.fuzzysheet">>, []}; +all_ext(<<"g2w">>) -> {<<"application">>, <<"vnd.geoplan">>, []}; +all_ext(<<"g3">>) -> {<<"image">>, <<"g3fax">>, []}; +all_ext(<<"g3w">>) -> {<<"application">>, <<"vnd.geospace">>, []}; +all_ext(<<"gac">>) -> {<<"application">>, <<"vnd.groove-account">>, []}; +all_ext(<<"gam">>) -> {<<"application">>, <<"x-tads">>, []}; +all_ext(<<"gbr">>) -> 
{<<"application">>, <<"rpki-ghostbusters">>, []}; +all_ext(<<"gca">>) -> {<<"application">>, <<"x-gca-compressed">>, []}; +all_ext(<<"gdl">>) -> {<<"model">>, <<"vnd.gdl">>, []}; +all_ext(<<"geo">>) -> {<<"application">>, <<"vnd.dynageo">>, []}; +all_ext(<<"gex">>) -> {<<"application">>, <<"vnd.geometry-explorer">>, []}; +all_ext(<<"ggb">>) -> {<<"application">>, <<"vnd.geogebra.file">>, []}; +all_ext(<<"ggt">>) -> {<<"application">>, <<"vnd.geogebra.tool">>, []}; +all_ext(<<"ghf">>) -> {<<"application">>, <<"vnd.groove-help">>, []}; +all_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []}; +all_ext(<<"gim">>) -> {<<"application">>, <<"vnd.groove-identity-message">>, []}; +all_ext(<<"gml">>) -> {<<"application">>, <<"gml+xml">>, []}; +all_ext(<<"gmx">>) -> {<<"application">>, <<"vnd.gmx">>, []}; +all_ext(<<"gnumeric">>) -> {<<"application">>, <<"x-gnumeric">>, []}; +all_ext(<<"gph">>) -> {<<"application">>, <<"vnd.flographit">>, []}; +all_ext(<<"gpx">>) -> {<<"application">>, <<"gpx+xml">>, []}; +all_ext(<<"gqf">>) -> {<<"application">>, <<"vnd.grafeq">>, []}; +all_ext(<<"gqs">>) -> {<<"application">>, <<"vnd.grafeq">>, []}; +all_ext(<<"gram">>) -> {<<"application">>, <<"srgs">>, []}; +all_ext(<<"gramps">>) -> {<<"application">>, <<"x-gramps-xml">>, []}; +all_ext(<<"gre">>) -> {<<"application">>, <<"vnd.geometry-explorer">>, []}; +all_ext(<<"grv">>) -> {<<"application">>, <<"vnd.groove-injector">>, []}; +all_ext(<<"grxml">>) -> {<<"application">>, <<"srgs+xml">>, []}; +all_ext(<<"gsf">>) -> {<<"application">>, <<"x-font-ghostscript">>, []}; +all_ext(<<"gtar">>) -> {<<"application">>, <<"x-gtar">>, []}; +all_ext(<<"gtm">>) -> {<<"application">>, <<"vnd.groove-tool-message">>, []}; +all_ext(<<"gtw">>) -> {<<"model">>, <<"vnd.gtw">>, []}; +all_ext(<<"gv">>) -> {<<"text">>, <<"vnd.graphviz">>, []}; +all_ext(<<"gxf">>) -> {<<"application">>, <<"gxf">>, []}; +all_ext(<<"gxt">>) -> {<<"application">>, <<"vnd.geonext">>, []}; +all_ext(<<"h261">>) -> {<<"video">>, <<"h261">>, 
[]}; +all_ext(<<"h263">>) -> {<<"video">>, <<"h263">>, []}; +all_ext(<<"h264">>) -> {<<"video">>, <<"h264">>, []}; +all_ext(<<"hal">>) -> {<<"application">>, <<"vnd.hal+xml">>, []}; +all_ext(<<"hbci">>) -> {<<"application">>, <<"vnd.hbci">>, []}; +all_ext(<<"hdf">>) -> {<<"application">>, <<"x-hdf">>, []}; +all_ext(<<"hh">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"hlp">>) -> {<<"application">>, <<"winhlp">>, []}; +all_ext(<<"hpgl">>) -> {<<"application">>, <<"vnd.hp-hpgl">>, []}; +all_ext(<<"hpid">>) -> {<<"application">>, <<"vnd.hp-hpid">>, []}; +all_ext(<<"hps">>) -> {<<"application">>, <<"vnd.hp-hps">>, []}; +all_ext(<<"hqx">>) -> {<<"application">>, <<"mac-binhex40">>, []}; +all_ext(<<"h">>) -> {<<"text">>, <<"x-c">>, []}; +all_ext(<<"htke">>) -> {<<"application">>, <<"vnd.kenameaapp">>, []}; +all_ext(<<"html">>) -> {<<"text">>, <<"html">>, []}; +all_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []}; +all_ext(<<"hvd">>) -> {<<"application">>, <<"vnd.yamaha.hv-dic">>, []}; +all_ext(<<"hvp">>) -> {<<"application">>, <<"vnd.yamaha.hv-voice">>, []}; +all_ext(<<"hvs">>) -> {<<"application">>, <<"vnd.yamaha.hv-script">>, []}; +all_ext(<<"i2g">>) -> {<<"application">>, <<"vnd.intergeo">>, []}; +all_ext(<<"icc">>) -> {<<"application">>, <<"vnd.iccprofile">>, []}; +all_ext(<<"ice">>) -> {<<"x-conference">>, <<"x-cooltalk">>, []}; +all_ext(<<"icm">>) -> {<<"application">>, <<"vnd.iccprofile">>, []}; +all_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []}; +all_ext(<<"ics">>) -> {<<"text">>, <<"calendar">>, []}; +all_ext(<<"ief">>) -> {<<"image">>, <<"ief">>, []}; +all_ext(<<"ifb">>) -> {<<"text">>, <<"calendar">>, []}; +all_ext(<<"ifm">>) -> {<<"application">>, <<"vnd.shana.informed.formdata">>, []}; +all_ext(<<"iges">>) -> {<<"model">>, <<"iges">>, []}; +all_ext(<<"igl">>) -> {<<"application">>, <<"vnd.igloader">>, []}; +all_ext(<<"igm">>) -> {<<"application">>, <<"vnd.insors.igm">>, []}; +all_ext(<<"igs">>) -> {<<"model">>, <<"iges">>, []}; +all_ext(<<"igx">>) -> 
{<<"application">>, <<"vnd.micrografx.igx">>, []}; +all_ext(<<"iif">>) -> {<<"application">>, <<"vnd.shana.informed.interchange">>, []}; +all_ext(<<"imp">>) -> {<<"application">>, <<"vnd.accpac.simply.imp">>, []}; +all_ext(<<"ims">>) -> {<<"application">>, <<"vnd.ms-ims">>, []}; +all_ext(<<"ink">>) -> {<<"application">>, <<"inkml+xml">>, []}; +all_ext(<<"inkml">>) -> {<<"application">>, <<"inkml+xml">>, []}; +all_ext(<<"install">>) -> {<<"application">>, <<"x-install-instructions">>, []}; +all_ext(<<"in">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"iota">>) -> {<<"application">>, <<"vnd.astraea-software.iota">>, []}; +all_ext(<<"ipfix">>) -> {<<"application">>, <<"ipfix">>, []}; +all_ext(<<"ipk">>) -> {<<"application">>, <<"vnd.shana.informed.package">>, []}; +all_ext(<<"irm">>) -> {<<"application">>, <<"vnd.ibm.rights-management">>, []}; +all_ext(<<"irp">>) -> {<<"application">>, <<"vnd.irepository.package+xml">>, []}; +all_ext(<<"iso">>) -> {<<"application">>, <<"x-iso9660-image">>, []}; +all_ext(<<"itp">>) -> {<<"application">>, <<"vnd.shana.informed.formtemplate">>, []}; +all_ext(<<"ivp">>) -> {<<"application">>, <<"vnd.immervision-ivp">>, []}; +all_ext(<<"ivu">>) -> {<<"application">>, <<"vnd.immervision-ivu">>, []}; +all_ext(<<"jad">>) -> {<<"text">>, <<"vnd.sun.j2me.app-descriptor">>, []}; +all_ext(<<"jam">>) -> {<<"application">>, <<"vnd.jam">>, []}; +all_ext(<<"jar">>) -> {<<"application">>, <<"java-archive">>, []}; +all_ext(<<"java">>) -> {<<"text">>, <<"x-java-source">>, []}; +all_ext(<<"jisp">>) -> {<<"application">>, <<"vnd.jisp">>, []}; +all_ext(<<"jlt">>) -> {<<"application">>, <<"vnd.hp-jlyt">>, []}; +all_ext(<<"jnlp">>) -> {<<"application">>, <<"x-java-jnlp-file">>, []}; +all_ext(<<"joda">>) -> {<<"application">>, <<"vnd.joost.joda-archive">>, []}; +all_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []}; +all_ext(<<"jpe">>) -> {<<"image">>, <<"jpeg">>, []}; +all_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []}; +all_ext(<<"jpgm">>) -> 
{<<"video">>, <<"jpm">>, []}; +all_ext(<<"jpgv">>) -> {<<"video">>, <<"jpeg">>, []}; +all_ext(<<"jpm">>) -> {<<"video">>, <<"jpm">>, []}; +all_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []}; +all_ext(<<"json">>) -> {<<"application">>, <<"json">>, []}; +all_ext(<<"jsonml">>) -> {<<"application">>, <<"jsonml+json">>, []}; +all_ext(<<"kar">>) -> {<<"audio">>, <<"midi">>, []}; +all_ext(<<"karbon">>) -> {<<"application">>, <<"vnd.kde.karbon">>, []}; +all_ext(<<"kfo">>) -> {<<"application">>, <<"vnd.kde.kformula">>, []}; +all_ext(<<"kia">>) -> {<<"application">>, <<"vnd.kidspiration">>, []}; +all_ext(<<"kml">>) -> {<<"application">>, <<"vnd.google-earth.kml+xml">>, []}; +all_ext(<<"kmz">>) -> {<<"application">>, <<"vnd.google-earth.kmz">>, []}; +all_ext(<<"kne">>) -> {<<"application">>, <<"vnd.kinar">>, []}; +all_ext(<<"knp">>) -> {<<"application">>, <<"vnd.kinar">>, []}; +all_ext(<<"kon">>) -> {<<"application">>, <<"vnd.kde.kontour">>, []}; +all_ext(<<"kpr">>) -> {<<"application">>, <<"vnd.kde.kpresenter">>, []}; +all_ext(<<"kpt">>) -> {<<"application">>, <<"vnd.kde.kpresenter">>, []}; +all_ext(<<"kpxx">>) -> {<<"application">>, <<"vnd.ds-keypoint">>, []}; +all_ext(<<"ksp">>) -> {<<"application">>, <<"vnd.kde.kspread">>, []}; +all_ext(<<"ktr">>) -> {<<"application">>, <<"vnd.kahootz">>, []}; +all_ext(<<"ktx">>) -> {<<"image">>, <<"ktx">>, []}; +all_ext(<<"ktz">>) -> {<<"application">>, <<"vnd.kahootz">>, []}; +all_ext(<<"kwd">>) -> {<<"application">>, <<"vnd.kde.kword">>, []}; +all_ext(<<"kwt">>) -> {<<"application">>, <<"vnd.kde.kword">>, []}; +all_ext(<<"lasxml">>) -> {<<"application">>, <<"vnd.las.las+xml">>, []}; +all_ext(<<"latex">>) -> {<<"application">>, <<"x-latex">>, []}; +all_ext(<<"lbd">>) -> {<<"application">>, <<"vnd.llamagraphics.life-balance.desktop">>, []}; +all_ext(<<"lbe">>) -> {<<"application">>, <<"vnd.llamagraphics.life-balance.exchange+xml">>, []}; +all_ext(<<"les">>) -> {<<"application">>, <<"vnd.hhe.lesson-player">>, []}; 
+all_ext(<<"lha">>) -> {<<"application">>, <<"x-lzh-compressed">>, []}; +all_ext(<<"link66">>) -> {<<"application">>, <<"vnd.route66.link66+xml">>, []}; +all_ext(<<"list3820">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []}; +all_ext(<<"listafp">>) -> {<<"application">>, <<"vnd.ibm.modcap">>, []}; +all_ext(<<"list">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"lnk">>) -> {<<"application">>, <<"x-ms-shortcut">>, []}; +all_ext(<<"log">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"lostxml">>) -> {<<"application">>, <<"lost+xml">>, []}; +all_ext(<<"lrf">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"lrm">>) -> {<<"application">>, <<"vnd.ms-lrm">>, []}; +all_ext(<<"ltf">>) -> {<<"application">>, <<"vnd.frogans.ltf">>, []}; +all_ext(<<"lvp">>) -> {<<"audio">>, <<"vnd.lucent.voice">>, []}; +all_ext(<<"lwp">>) -> {<<"application">>, <<"vnd.lotus-wordpro">>, []}; +all_ext(<<"lzh">>) -> {<<"application">>, <<"x-lzh-compressed">>, []}; +all_ext(<<"m13">>) -> {<<"application">>, <<"x-msmediaview">>, []}; +all_ext(<<"m14">>) -> {<<"application">>, <<"x-msmediaview">>, []}; +all_ext(<<"m1v">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"m21">>) -> {<<"application">>, <<"mp21">>, []}; +all_ext(<<"m2a">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"m2v">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"m3a">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"m3u8">>) -> {<<"application">>, <<"vnd.apple.mpegurl">>, []}; +all_ext(<<"m3u">>) -> {<<"audio">>, <<"x-mpegurl">>, []}; +all_ext(<<"m4u">>) -> {<<"video">>, <<"vnd.mpegurl">>, []}; +all_ext(<<"m4v">>) -> {<<"video">>, <<"x-m4v">>, []}; +all_ext(<<"ma">>) -> {<<"application">>, <<"mathematica">>, []}; +all_ext(<<"mads">>) -> {<<"application">>, <<"mads+xml">>, []}; +all_ext(<<"mag">>) -> {<<"application">>, <<"vnd.ecowin.chart">>, []}; +all_ext(<<"maker">>) -> {<<"application">>, <<"vnd.framemaker">>, []}; +all_ext(<<"man">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"mar">>) -> {<<"application">>, 
<<"octet-stream">>, []}; +all_ext(<<"mathml">>) -> {<<"application">>, <<"mathml+xml">>, []}; +all_ext(<<"mb">>) -> {<<"application">>, <<"mathematica">>, []}; +all_ext(<<"mbk">>) -> {<<"application">>, <<"vnd.mobius.mbk">>, []}; +all_ext(<<"mbox">>) -> {<<"application">>, <<"mbox">>, []}; +all_ext(<<"mc1">>) -> {<<"application">>, <<"vnd.medcalcdata">>, []}; +all_ext(<<"mcd">>) -> {<<"application">>, <<"vnd.mcd">>, []}; +all_ext(<<"mcurl">>) -> {<<"text">>, <<"vnd.curl.mcurl">>, []}; +all_ext(<<"mdb">>) -> {<<"application">>, <<"x-msaccess">>, []}; +all_ext(<<"mdi">>) -> {<<"image">>, <<"vnd.ms-modi">>, []}; +all_ext(<<"mesh">>) -> {<<"model">>, <<"mesh">>, []}; +all_ext(<<"meta4">>) -> {<<"application">>, <<"metalink4+xml">>, []}; +all_ext(<<"metalink">>) -> {<<"application">>, <<"metalink+xml">>, []}; +all_ext(<<"me">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"mets">>) -> {<<"application">>, <<"mets+xml">>, []}; +all_ext(<<"mfm">>) -> {<<"application">>, <<"vnd.mfmp">>, []}; +all_ext(<<"mft">>) -> {<<"application">>, <<"rpki-manifest">>, []}; +all_ext(<<"mgp">>) -> {<<"application">>, <<"vnd.osgeo.mapguide.package">>, []}; +all_ext(<<"mgz">>) -> {<<"application">>, <<"vnd.proteus.magazine">>, []}; +all_ext(<<"mid">>) -> {<<"audio">>, <<"midi">>, []}; +all_ext(<<"midi">>) -> {<<"audio">>, <<"midi">>, []}; +all_ext(<<"mie">>) -> {<<"application">>, <<"x-mie">>, []}; +all_ext(<<"mif">>) -> {<<"application">>, <<"vnd.mif">>, []}; +all_ext(<<"mime">>) -> {<<"message">>, <<"rfc822">>, []}; +all_ext(<<"mj2">>) -> {<<"video">>, <<"mj2">>, []}; +all_ext(<<"mjp2">>) -> {<<"video">>, <<"mj2">>, []}; +all_ext(<<"mk3d">>) -> {<<"video">>, <<"x-matroska">>, []}; +all_ext(<<"mka">>) -> {<<"audio">>, <<"x-matroska">>, []}; +all_ext(<<"mks">>) -> {<<"video">>, <<"x-matroska">>, []}; +all_ext(<<"mkv">>) -> {<<"video">>, <<"x-matroska">>, []}; +all_ext(<<"mlp">>) -> {<<"application">>, <<"vnd.dolby.mlp">>, []}; +all_ext(<<"mmd">>) -> {<<"application">>, 
<<"vnd.chipnuts.karaoke-mmd">>, []}; +all_ext(<<"mmf">>) -> {<<"application">>, <<"vnd.smaf">>, []}; +all_ext(<<"mmr">>) -> {<<"image">>, <<"vnd.fujixerox.edmics-mmr">>, []}; +all_ext(<<"mng">>) -> {<<"video">>, <<"x-mng">>, []}; +all_ext(<<"mny">>) -> {<<"application">>, <<"x-msmoney">>, []}; +all_ext(<<"mobi">>) -> {<<"application">>, <<"x-mobipocket-ebook">>, []}; +all_ext(<<"mods">>) -> {<<"application">>, <<"mods+xml">>, []}; +all_ext(<<"movie">>) -> {<<"video">>, <<"x-sgi-movie">>, []}; +all_ext(<<"mov">>) -> {<<"video">>, <<"quicktime">>, []}; +all_ext(<<"mp21">>) -> {<<"application">>, <<"mp21">>, []}; +all_ext(<<"mp2a">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"mp2">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"mp4a">>) -> {<<"audio">>, <<"mp4">>, []}; +all_ext(<<"mp4s">>) -> {<<"application">>, <<"mp4">>, []}; +all_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []}; +all_ext(<<"mp4v">>) -> {<<"video">>, <<"mp4">>, []}; +all_ext(<<"mpc">>) -> {<<"application">>, <<"vnd.mophun.certificate">>, []}; +all_ext(<<"mpeg">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"mpe">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"mpg4">>) -> {<<"video">>, <<"mp4">>, []}; +all_ext(<<"mpga">>) -> {<<"audio">>, <<"mpeg">>, []}; +all_ext(<<"mpg">>) -> {<<"video">>, <<"mpeg">>, []}; +all_ext(<<"mpkg">>) -> {<<"application">>, <<"vnd.apple.installer+xml">>, []}; +all_ext(<<"mpm">>) -> {<<"application">>, <<"vnd.blueice.multipass">>, []}; +all_ext(<<"mpn">>) -> {<<"application">>, <<"vnd.mophun.application">>, []}; +all_ext(<<"mpp">>) -> {<<"application">>, <<"vnd.ms-project">>, []}; +all_ext(<<"mpt">>) -> {<<"application">>, <<"vnd.ms-project">>, []}; +all_ext(<<"mpy">>) -> {<<"application">>, <<"vnd.ibm.minipay">>, []}; +all_ext(<<"mqy">>) -> {<<"application">>, <<"vnd.mobius.mqy">>, []}; +all_ext(<<"mrc">>) -> {<<"application">>, <<"marc">>, []}; +all_ext(<<"mrcx">>) -> {<<"application">>, <<"marcxml+xml">>, []}; 
+all_ext(<<"mscml">>) -> {<<"application">>, <<"mediaservercontrol+xml">>, []}; +all_ext(<<"mseed">>) -> {<<"application">>, <<"vnd.fdsn.mseed">>, []}; +all_ext(<<"mseq">>) -> {<<"application">>, <<"vnd.mseq">>, []}; +all_ext(<<"msf">>) -> {<<"application">>, <<"vnd.epson.msf">>, []}; +all_ext(<<"msh">>) -> {<<"model">>, <<"mesh">>, []}; +all_ext(<<"msi">>) -> {<<"application">>, <<"x-msdownload">>, []}; +all_ext(<<"msl">>) -> {<<"application">>, <<"vnd.mobius.msl">>, []}; +all_ext(<<"ms">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"msty">>) -> {<<"application">>, <<"vnd.muvee.style">>, []}; +all_ext(<<"mts">>) -> {<<"model">>, <<"vnd.mts">>, []}; +all_ext(<<"mus">>) -> {<<"application">>, <<"vnd.musician">>, []}; +all_ext(<<"musicxml">>) -> {<<"application">>, <<"vnd.recordare.musicxml+xml">>, []}; +all_ext(<<"mvb">>) -> {<<"application">>, <<"x-msmediaview">>, []}; +all_ext(<<"mwf">>) -> {<<"application">>, <<"vnd.mfer">>, []}; +all_ext(<<"mxf">>) -> {<<"application">>, <<"mxf">>, []}; +all_ext(<<"mxl">>) -> {<<"application">>, <<"vnd.recordare.musicxml">>, []}; +all_ext(<<"mxml">>) -> {<<"application">>, <<"xv+xml">>, []}; +all_ext(<<"mxs">>) -> {<<"application">>, <<"vnd.triscape.mxs">>, []}; +all_ext(<<"mxu">>) -> {<<"video">>, <<"vnd.mpegurl">>, []}; +all_ext(<<"n3">>) -> {<<"text">>, <<"n3">>, []}; +all_ext(<<"nb">>) -> {<<"application">>, <<"mathematica">>, []}; +all_ext(<<"nbp">>) -> {<<"application">>, <<"vnd.wolfram.player">>, []}; +all_ext(<<"nc">>) -> {<<"application">>, <<"x-netcdf">>, []}; +all_ext(<<"ncx">>) -> {<<"application">>, <<"x-dtbncx+xml">>, []}; +all_ext(<<"nfo">>) -> {<<"text">>, <<"x-nfo">>, []}; +all_ext(<<"n-gage">>) -> {<<"application">>, <<"vnd.nokia.n-gage.symbian.install">>, []}; +all_ext(<<"ngdat">>) -> {<<"application">>, <<"vnd.nokia.n-gage.data">>, []}; +all_ext(<<"nitf">>) -> {<<"application">>, <<"vnd.nitf">>, []}; +all_ext(<<"nlu">>) -> {<<"application">>, <<"vnd.neurolanguage.nlu">>, []}; +all_ext(<<"nml">>) -> 
{<<"application">>, <<"vnd.enliven">>, []}; +all_ext(<<"nnd">>) -> {<<"application">>, <<"vnd.noblenet-directory">>, []}; +all_ext(<<"nns">>) -> {<<"application">>, <<"vnd.noblenet-sealer">>, []}; +all_ext(<<"nnw">>) -> {<<"application">>, <<"vnd.noblenet-web">>, []}; +all_ext(<<"npx">>) -> {<<"image">>, <<"vnd.net-fpx">>, []}; +all_ext(<<"nsc">>) -> {<<"application">>, <<"x-conference">>, []}; +all_ext(<<"nsf">>) -> {<<"application">>, <<"vnd.lotus-notes">>, []}; +all_ext(<<"ntf">>) -> {<<"application">>, <<"vnd.nitf">>, []}; +all_ext(<<"nzb">>) -> {<<"application">>, <<"x-nzb">>, []}; +all_ext(<<"oa2">>) -> {<<"application">>, <<"vnd.fujitsu.oasys2">>, []}; +all_ext(<<"oa3">>) -> {<<"application">>, <<"vnd.fujitsu.oasys3">>, []}; +all_ext(<<"oas">>) -> {<<"application">>, <<"vnd.fujitsu.oasys">>, []}; +all_ext(<<"obd">>) -> {<<"application">>, <<"x-msbinder">>, []}; +all_ext(<<"obj">>) -> {<<"application">>, <<"x-tgif">>, []}; +all_ext(<<"oda">>) -> {<<"application">>, <<"oda">>, []}; +all_ext(<<"odb">>) -> {<<"application">>, <<"vnd.oasis.opendocument.database">>, []}; +all_ext(<<"odc">>) -> {<<"application">>, <<"vnd.oasis.opendocument.chart">>, []}; +all_ext(<<"odf">>) -> {<<"application">>, <<"vnd.oasis.opendocument.formula">>, []}; +all_ext(<<"odft">>) -> {<<"application">>, <<"vnd.oasis.opendocument.formula-template">>, []}; +all_ext(<<"odg">>) -> {<<"application">>, <<"vnd.oasis.opendocument.graphics">>, []}; +all_ext(<<"odi">>) -> {<<"application">>, <<"vnd.oasis.opendocument.image">>, []}; +all_ext(<<"odm">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text-master">>, []}; +all_ext(<<"odp">>) -> {<<"application">>, <<"vnd.oasis.opendocument.presentation">>, []}; +all_ext(<<"ods">>) -> {<<"application">>, <<"vnd.oasis.opendocument.spreadsheet">>, []}; +all_ext(<<"odt">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text">>, []}; +all_ext(<<"oga">>) -> {<<"audio">>, <<"ogg">>, []}; +all_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []}; 
+all_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []}; +all_ext(<<"ogx">>) -> {<<"application">>, <<"ogg">>, []}; +all_ext(<<"omdoc">>) -> {<<"application">>, <<"omdoc+xml">>, []}; +all_ext(<<"onepkg">>) -> {<<"application">>, <<"onenote">>, []}; +all_ext(<<"onetmp">>) -> {<<"application">>, <<"onenote">>, []}; +all_ext(<<"onetoc2">>) -> {<<"application">>, <<"onenote">>, []}; +all_ext(<<"onetoc">>) -> {<<"application">>, <<"onenote">>, []}; +all_ext(<<"opf">>) -> {<<"application">>, <<"oebps-package+xml">>, []}; +all_ext(<<"opml">>) -> {<<"text">>, <<"x-opml">>, []}; +all_ext(<<"oprc">>) -> {<<"application">>, <<"vnd.palm">>, []}; +all_ext(<<"org">>) -> {<<"application">>, <<"vnd.lotus-organizer">>, []}; +all_ext(<<"osf">>) -> {<<"application">>, <<"vnd.yamaha.openscoreformat">>, []}; +all_ext(<<"osfpvg">>) -> {<<"application">>, <<"vnd.yamaha.openscoreformat.osfpvg+xml">>, []}; +all_ext(<<"otc">>) -> {<<"application">>, <<"vnd.oasis.opendocument.chart-template">>, []}; +all_ext(<<"otf">>) -> {<<"application">>, <<"x-font-otf">>, []}; +all_ext(<<"otg">>) -> {<<"application">>, <<"vnd.oasis.opendocument.graphics-template">>, []}; +all_ext(<<"oth">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text-web">>, []}; +all_ext(<<"oti">>) -> {<<"application">>, <<"vnd.oasis.opendocument.image-template">>, []}; +all_ext(<<"otp">>) -> {<<"application">>, <<"vnd.oasis.opendocument.presentation-template">>, []}; +all_ext(<<"ots">>) -> {<<"application">>, <<"vnd.oasis.opendocument.spreadsheet-template">>, []}; +all_ext(<<"ott">>) -> {<<"application">>, <<"vnd.oasis.opendocument.text-template">>, []}; +all_ext(<<"oxps">>) -> {<<"application">>, <<"oxps">>, []}; +all_ext(<<"oxt">>) -> {<<"application">>, <<"vnd.openofficeorg.extension">>, []}; +all_ext(<<"p10">>) -> {<<"application">>, <<"pkcs10">>, []}; +all_ext(<<"p12">>) -> {<<"application">>, <<"x-pkcs12">>, []}; +all_ext(<<"p7b">>) -> {<<"application">>, <<"x-pkcs7-certificates">>, []}; +all_ext(<<"p7c">>) -> 
{<<"application">>, <<"pkcs7-mime">>, []}; +all_ext(<<"p7m">>) -> {<<"application">>, <<"pkcs7-mime">>, []}; +all_ext(<<"p7r">>) -> {<<"application">>, <<"x-pkcs7-certreqresp">>, []}; +all_ext(<<"p7s">>) -> {<<"application">>, <<"pkcs7-signature">>, []}; +all_ext(<<"p8">>) -> {<<"application">>, <<"pkcs8">>, []}; +all_ext(<<"pas">>) -> {<<"text">>, <<"x-pascal">>, []}; +all_ext(<<"paw">>) -> {<<"application">>, <<"vnd.pawaafile">>, []}; +all_ext(<<"pbd">>) -> {<<"application">>, <<"vnd.powerbuilder6">>, []}; +all_ext(<<"pbm">>) -> {<<"image">>, <<"x-portable-bitmap">>, []}; +all_ext(<<"pcap">>) -> {<<"application">>, <<"vnd.tcpdump.pcap">>, []}; +all_ext(<<"pcf">>) -> {<<"application">>, <<"x-font-pcf">>, []}; +all_ext(<<"pcl">>) -> {<<"application">>, <<"vnd.hp-pcl">>, []}; +all_ext(<<"pclxl">>) -> {<<"application">>, <<"vnd.hp-pclxl">>, []}; +all_ext(<<"pct">>) -> {<<"image">>, <<"x-pict">>, []}; +all_ext(<<"pcurl">>) -> {<<"application">>, <<"vnd.curl.pcurl">>, []}; +all_ext(<<"pcx">>) -> {<<"image">>, <<"x-pcx">>, []}; +all_ext(<<"pdb">>) -> {<<"application">>, <<"vnd.palm">>, []}; +all_ext(<<"pdf">>) -> {<<"application">>, <<"pdf">>, []}; +all_ext(<<"pfa">>) -> {<<"application">>, <<"x-font-type1">>, []}; +all_ext(<<"pfb">>) -> {<<"application">>, <<"x-font-type1">>, []}; +all_ext(<<"pfm">>) -> {<<"application">>, <<"x-font-type1">>, []}; +all_ext(<<"pfr">>) -> {<<"application">>, <<"font-tdpfr">>, []}; +all_ext(<<"pfx">>) -> {<<"application">>, <<"x-pkcs12">>, []}; +all_ext(<<"pgm">>) -> {<<"image">>, <<"x-portable-graymap">>, []}; +all_ext(<<"pgn">>) -> {<<"application">>, <<"x-chess-pgn">>, []}; +all_ext(<<"pgp">>) -> {<<"application">>, <<"pgp-encrypted">>, []}; +all_ext(<<"pic">>) -> {<<"image">>, <<"x-pict">>, []}; +all_ext(<<"pkg">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"pki">>) -> {<<"application">>, <<"pkixcmp">>, []}; +all_ext(<<"pkipath">>) -> {<<"application">>, <<"pkix-pkipath">>, []}; +all_ext(<<"plb">>) -> 
{<<"application">>, <<"vnd.3gpp.pic-bw-large">>, []}; +all_ext(<<"plc">>) -> {<<"application">>, <<"vnd.mobius.plc">>, []}; +all_ext(<<"plf">>) -> {<<"application">>, <<"vnd.pocketlearn">>, []}; +all_ext(<<"pls">>) -> {<<"application">>, <<"pls+xml">>, []}; +all_ext(<<"pml">>) -> {<<"application">>, <<"vnd.ctc-posml">>, []}; +all_ext(<<"png">>) -> {<<"image">>, <<"png">>, []}; +all_ext(<<"pnm">>) -> {<<"image">>, <<"x-portable-anymap">>, []}; +all_ext(<<"portpkg">>) -> {<<"application">>, <<"vnd.macports.portpkg">>, []}; +all_ext(<<"pot">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []}; +all_ext(<<"potm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.template.macroenabled.12">>, []}; +all_ext(<<"potx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.template">>, []}; +all_ext(<<"ppam">>) -> {<<"application">>, <<"vnd.ms-powerpoint.addin.macroenabled.12">>, []}; +all_ext(<<"ppd">>) -> {<<"application">>, <<"vnd.cups-ppd">>, []}; +all_ext(<<"ppm">>) -> {<<"image">>, <<"x-portable-pixmap">>, []}; +all_ext(<<"pps">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []}; +all_ext(<<"ppsm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.slideshow.macroenabled.12">>, []}; +all_ext(<<"ppsx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.slideshow">>, []}; +all_ext(<<"ppt">>) -> {<<"application">>, <<"vnd.ms-powerpoint">>, []}; +all_ext(<<"pptm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.presentation.macroenabled.12">>, []}; +all_ext(<<"pptx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.presentation">>, []}; +all_ext(<<"pqa">>) -> {<<"application">>, <<"vnd.palm">>, []}; +all_ext(<<"prc">>) -> {<<"application">>, <<"x-mobipocket-ebook">>, []}; +all_ext(<<"pre">>) -> {<<"application">>, <<"vnd.lotus-freelance">>, []}; +all_ext(<<"prf">>) -> {<<"application">>, <<"pics-rules">>, []}; +all_ext(<<"ps">>) -> {<<"application">>, <<"postscript">>, []}; +all_ext(<<"psb">>) -> 
{<<"application">>, <<"vnd.3gpp.pic-bw-small">>, []}; +all_ext(<<"psd">>) -> {<<"image">>, <<"vnd.adobe.photoshop">>, []}; +all_ext(<<"psf">>) -> {<<"application">>, <<"x-font-linux-psf">>, []}; +all_ext(<<"pskcxml">>) -> {<<"application">>, <<"pskc+xml">>, []}; +all_ext(<<"p">>) -> {<<"text">>, <<"x-pascal">>, []}; +all_ext(<<"ptid">>) -> {<<"application">>, <<"vnd.pvi.ptid1">>, []}; +all_ext(<<"pub">>) -> {<<"application">>, <<"x-mspublisher">>, []}; +all_ext(<<"pvb">>) -> {<<"application">>, <<"vnd.3gpp.pic-bw-var">>, []}; +all_ext(<<"pwn">>) -> {<<"application">>, <<"vnd.3m.post-it-notes">>, []}; +all_ext(<<"pya">>) -> {<<"audio">>, <<"vnd.ms-playready.media.pya">>, []}; +all_ext(<<"pyv">>) -> {<<"video">>, <<"vnd.ms-playready.media.pyv">>, []}; +all_ext(<<"qam">>) -> {<<"application">>, <<"vnd.epson.quickanime">>, []}; +all_ext(<<"qbo">>) -> {<<"application">>, <<"vnd.intu.qbo">>, []}; +all_ext(<<"qfx">>) -> {<<"application">>, <<"vnd.intu.qfx">>, []}; +all_ext(<<"qps">>) -> {<<"application">>, <<"vnd.publishare-delta-tree">>, []}; +all_ext(<<"qt">>) -> {<<"video">>, <<"quicktime">>, []}; +all_ext(<<"qwd">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qwt">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qxb">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qxd">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qxl">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"qxt">>) -> {<<"application">>, <<"vnd.quark.quarkxpress">>, []}; +all_ext(<<"ra">>) -> {<<"audio">>, <<"x-pn-realaudio">>, []}; +all_ext(<<"ram">>) -> {<<"audio">>, <<"x-pn-realaudio">>, []}; +all_ext(<<"rar">>) -> {<<"application">>, <<"x-rar-compressed">>, []}; +all_ext(<<"ras">>) -> {<<"image">>, <<"x-cmu-raster">>, []}; +all_ext(<<"rcprofile">>) -> {<<"application">>, <<"vnd.ipunplugged.rcprofile">>, []}; +all_ext(<<"rdf">>) -> {<<"application">>, <<"rdf+xml">>, []}; 
+all_ext(<<"rdz">>) -> {<<"application">>, <<"vnd.data-vision.rdz">>, []}; +all_ext(<<"rep">>) -> {<<"application">>, <<"vnd.businessobjects">>, []}; +all_ext(<<"res">>) -> {<<"application">>, <<"x-dtbresource+xml">>, []}; +all_ext(<<"rgb">>) -> {<<"image">>, <<"x-rgb">>, []}; +all_ext(<<"rif">>) -> {<<"application">>, <<"reginfo+xml">>, []}; +all_ext(<<"rip">>) -> {<<"audio">>, <<"vnd.rip">>, []}; +all_ext(<<"ris">>) -> {<<"application">>, <<"x-research-info-systems">>, []}; +all_ext(<<"rl">>) -> {<<"application">>, <<"resource-lists+xml">>, []}; +all_ext(<<"rlc">>) -> {<<"image">>, <<"vnd.fujixerox.edmics-rlc">>, []}; +all_ext(<<"rld">>) -> {<<"application">>, <<"resource-lists-diff+xml">>, []}; +all_ext(<<"rm">>) -> {<<"application">>, <<"vnd.rn-realmedia">>, []}; +all_ext(<<"rmi">>) -> {<<"audio">>, <<"midi">>, []}; +all_ext(<<"rmp">>) -> {<<"audio">>, <<"x-pn-realaudio-plugin">>, []}; +all_ext(<<"rms">>) -> {<<"application">>, <<"vnd.jcp.javame.midlet-rms">>, []}; +all_ext(<<"rmvb">>) -> {<<"application">>, <<"vnd.rn-realmedia-vbr">>, []}; +all_ext(<<"rnc">>) -> {<<"application">>, <<"relax-ng-compact-syntax">>, []}; +all_ext(<<"roa">>) -> {<<"application">>, <<"rpki-roa">>, []}; +all_ext(<<"roff">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"rp9">>) -> {<<"application">>, <<"vnd.cloanto.rp9">>, []}; +all_ext(<<"rpss">>) -> {<<"application">>, <<"vnd.nokia.radio-presets">>, []}; +all_ext(<<"rpst">>) -> {<<"application">>, <<"vnd.nokia.radio-preset">>, []}; +all_ext(<<"rq">>) -> {<<"application">>, <<"sparql-query">>, []}; +all_ext(<<"rs">>) -> {<<"application">>, <<"rls-services+xml">>, []}; +all_ext(<<"rsd">>) -> {<<"application">>, <<"rsd+xml">>, []}; +all_ext(<<"rss">>) -> {<<"application">>, <<"rss+xml">>, []}; +all_ext(<<"rtf">>) -> {<<"application">>, <<"rtf">>, []}; +all_ext(<<"rtx">>) -> {<<"text">>, <<"richtext">>, []}; +all_ext(<<"s3m">>) -> {<<"audio">>, <<"s3m">>, []}; +all_ext(<<"saf">>) -> {<<"application">>, <<"vnd.yamaha.smaf-audio">>, []}; 
+all_ext(<<"sbml">>) -> {<<"application">>, <<"sbml+xml">>, []}; +all_ext(<<"sc">>) -> {<<"application">>, <<"vnd.ibm.secure-container">>, []}; +all_ext(<<"scd">>) -> {<<"application">>, <<"x-msschedule">>, []}; +all_ext(<<"scm">>) -> {<<"application">>, <<"vnd.lotus-screencam">>, []}; +all_ext(<<"scq">>) -> {<<"application">>, <<"scvp-cv-request">>, []}; +all_ext(<<"scs">>) -> {<<"application">>, <<"scvp-cv-response">>, []}; +all_ext(<<"scurl">>) -> {<<"text">>, <<"vnd.curl.scurl">>, []}; +all_ext(<<"sda">>) -> {<<"application">>, <<"vnd.stardivision.draw">>, []}; +all_ext(<<"sdc">>) -> {<<"application">>, <<"vnd.stardivision.calc">>, []}; +all_ext(<<"sdd">>) -> {<<"application">>, <<"vnd.stardivision.impress">>, []}; +all_ext(<<"sdkd">>) -> {<<"application">>, <<"vnd.solent.sdkm+xml">>, []}; +all_ext(<<"sdkm">>) -> {<<"application">>, <<"vnd.solent.sdkm+xml">>, []}; +all_ext(<<"sdp">>) -> {<<"application">>, <<"sdp">>, []}; +all_ext(<<"sdw">>) -> {<<"application">>, <<"vnd.stardivision.writer">>, []}; +all_ext(<<"see">>) -> {<<"application">>, <<"vnd.seemail">>, []}; +all_ext(<<"seed">>) -> {<<"application">>, <<"vnd.fdsn.seed">>, []}; +all_ext(<<"sema">>) -> {<<"application">>, <<"vnd.sema">>, []}; +all_ext(<<"semd">>) -> {<<"application">>, <<"vnd.semd">>, []}; +all_ext(<<"semf">>) -> {<<"application">>, <<"vnd.semf">>, []}; +all_ext(<<"ser">>) -> {<<"application">>, <<"java-serialized-object">>, []}; +all_ext(<<"setpay">>) -> {<<"application">>, <<"set-payment-initiation">>, []}; +all_ext(<<"setreg">>) -> {<<"application">>, <<"set-registration-initiation">>, []}; +all_ext(<<"sfd-hdstx">>) -> {<<"application">>, <<"vnd.hydrostatix.sof-data">>, []}; +all_ext(<<"sfs">>) -> {<<"application">>, <<"vnd.spotfire.sfs">>, []}; +all_ext(<<"sfv">>) -> {<<"text">>, <<"x-sfv">>, []}; +all_ext(<<"sgi">>) -> {<<"image">>, <<"sgi">>, []}; +all_ext(<<"sgl">>) -> {<<"application">>, <<"vnd.stardivision.writer-global">>, []}; +all_ext(<<"sgml">>) -> {<<"text">>, <<"sgml">>, 
[]}; +all_ext(<<"sgm">>) -> {<<"text">>, <<"sgml">>, []}; +all_ext(<<"sh">>) -> {<<"application">>, <<"x-sh">>, []}; +all_ext(<<"shar">>) -> {<<"application">>, <<"x-shar">>, []}; +all_ext(<<"shf">>) -> {<<"application">>, <<"shf+xml">>, []}; +all_ext(<<"sid">>) -> {<<"image">>, <<"x-mrsid-image">>, []}; +all_ext(<<"sig">>) -> {<<"application">>, <<"pgp-signature">>, []}; +all_ext(<<"sil">>) -> {<<"audio">>, <<"silk">>, []}; +all_ext(<<"silo">>) -> {<<"model">>, <<"mesh">>, []}; +all_ext(<<"sis">>) -> {<<"application">>, <<"vnd.symbian.install">>, []}; +all_ext(<<"sisx">>) -> {<<"application">>, <<"vnd.symbian.install">>, []}; +all_ext(<<"sit">>) -> {<<"application">>, <<"x-stuffit">>, []}; +all_ext(<<"sitx">>) -> {<<"application">>, <<"x-stuffitx">>, []}; +all_ext(<<"skd">>) -> {<<"application">>, <<"vnd.koan">>, []}; +all_ext(<<"skm">>) -> {<<"application">>, <<"vnd.koan">>, []}; +all_ext(<<"skp">>) -> {<<"application">>, <<"vnd.koan">>, []}; +all_ext(<<"skt">>) -> {<<"application">>, <<"vnd.koan">>, []}; +all_ext(<<"sldm">>) -> {<<"application">>, <<"vnd.ms-powerpoint.slide.macroenabled.12">>, []}; +all_ext(<<"sldx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.presentationml.slide">>, []}; +all_ext(<<"slt">>) -> {<<"application">>, <<"vnd.epson.salt">>, []}; +all_ext(<<"sm">>) -> {<<"application">>, <<"vnd.stepmania.stepchart">>, []}; +all_ext(<<"smf">>) -> {<<"application">>, <<"vnd.stardivision.math">>, []}; +all_ext(<<"smi">>) -> {<<"application">>, <<"smil+xml">>, []}; +all_ext(<<"smil">>) -> {<<"application">>, <<"smil+xml">>, []}; +all_ext(<<"smv">>) -> {<<"video">>, <<"x-smv">>, []}; +all_ext(<<"smzip">>) -> {<<"application">>, <<"vnd.stepmania.package">>, []}; +all_ext(<<"snd">>) -> {<<"audio">>, <<"basic">>, []}; +all_ext(<<"snf">>) -> {<<"application">>, <<"x-font-snf">>, []}; +all_ext(<<"so">>) -> {<<"application">>, <<"octet-stream">>, []}; +all_ext(<<"spc">>) -> {<<"application">>, <<"x-pkcs7-certificates">>, []}; 
+all_ext(<<"spf">>) -> {<<"application">>, <<"vnd.yamaha.smaf-phrase">>, []}; +all_ext(<<"spl">>) -> {<<"application">>, <<"x-futuresplash">>, []}; +all_ext(<<"spot">>) -> {<<"text">>, <<"vnd.in3d.spot">>, []}; +all_ext(<<"spp">>) -> {<<"application">>, <<"scvp-vp-response">>, []}; +all_ext(<<"spq">>) -> {<<"application">>, <<"scvp-vp-request">>, []}; +all_ext(<<"spx">>) -> {<<"audio">>, <<"ogg">>, []}; +all_ext(<<"sql">>) -> {<<"application">>, <<"x-sql">>, []}; +all_ext(<<"src">>) -> {<<"application">>, <<"x-wais-source">>, []}; +all_ext(<<"srt">>) -> {<<"application">>, <<"x-subrip">>, []}; +all_ext(<<"sru">>) -> {<<"application">>, <<"sru+xml">>, []}; +all_ext(<<"srx">>) -> {<<"application">>, <<"sparql-results+xml">>, []}; +all_ext(<<"ssdl">>) -> {<<"application">>, <<"ssdl+xml">>, []}; +all_ext(<<"sse">>) -> {<<"application">>, <<"vnd.kodak-descriptor">>, []}; +all_ext(<<"ssf">>) -> {<<"application">>, <<"vnd.epson.ssf">>, []}; +all_ext(<<"ssml">>) -> {<<"application">>, <<"ssml+xml">>, []}; +all_ext(<<"st">>) -> {<<"application">>, <<"vnd.sailingtracker.track">>, []}; +all_ext(<<"stc">>) -> {<<"application">>, <<"vnd.sun.xml.calc.template">>, []}; +all_ext(<<"std">>) -> {<<"application">>, <<"vnd.sun.xml.draw.template">>, []}; +all_ext(<<"s">>) -> {<<"text">>, <<"x-asm">>, []}; +all_ext(<<"stf">>) -> {<<"application">>, <<"vnd.wt.stf">>, []}; +all_ext(<<"sti">>) -> {<<"application">>, <<"vnd.sun.xml.impress.template">>, []}; +all_ext(<<"stk">>) -> {<<"application">>, <<"hyperstudio">>, []}; +all_ext(<<"stl">>) -> {<<"application">>, <<"vnd.ms-pki.stl">>, []}; +all_ext(<<"str">>) -> {<<"application">>, <<"vnd.pg.format">>, []}; +all_ext(<<"stw">>) -> {<<"application">>, <<"vnd.sun.xml.writer.template">>, []}; +all_ext(<<"sub">>) -> {<<"image">>, <<"vnd.dvb.subtitle">>, []}; +all_ext(<<"sus">>) -> {<<"application">>, <<"vnd.sus-calendar">>, []}; +all_ext(<<"susp">>) -> {<<"application">>, <<"vnd.sus-calendar">>, []}; +all_ext(<<"sv4cpio">>) -> 
{<<"application">>, <<"x-sv4cpio">>, []}; +all_ext(<<"sv4crc">>) -> {<<"application">>, <<"x-sv4crc">>, []}; +all_ext(<<"svc">>) -> {<<"application">>, <<"vnd.dvb.service">>, []}; +all_ext(<<"svd">>) -> {<<"application">>, <<"vnd.svd">>, []}; +all_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []}; +all_ext(<<"svgz">>) -> {<<"image">>, <<"svg+xml">>, []}; +all_ext(<<"swa">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"swf">>) -> {<<"application">>, <<"x-shockwave-flash">>, []}; +all_ext(<<"swi">>) -> {<<"application">>, <<"vnd.aristanetworks.swi">>, []}; +all_ext(<<"sxc">>) -> {<<"application">>, <<"vnd.sun.xml.calc">>, []}; +all_ext(<<"sxd">>) -> {<<"application">>, <<"vnd.sun.xml.draw">>, []}; +all_ext(<<"sxg">>) -> {<<"application">>, <<"vnd.sun.xml.writer.global">>, []}; +all_ext(<<"sxi">>) -> {<<"application">>, <<"vnd.sun.xml.impress">>, []}; +all_ext(<<"sxm">>) -> {<<"application">>, <<"vnd.sun.xml.math">>, []}; +all_ext(<<"sxw">>) -> {<<"application">>, <<"vnd.sun.xml.writer">>, []}; +all_ext(<<"t3">>) -> {<<"application">>, <<"x-t3vm-image">>, []}; +all_ext(<<"taglet">>) -> {<<"application">>, <<"vnd.mynfc">>, []}; +all_ext(<<"tao">>) -> {<<"application">>, <<"vnd.tao.intent-module-archive">>, []}; +all_ext(<<"tar">>) -> {<<"application">>, <<"x-tar">>, []}; +all_ext(<<"tcap">>) -> {<<"application">>, <<"vnd.3gpp2.tcap">>, []}; +all_ext(<<"tcl">>) -> {<<"application">>, <<"x-tcl">>, []}; +all_ext(<<"teacher">>) -> {<<"application">>, <<"vnd.smart.teacher">>, []}; +all_ext(<<"tei">>) -> {<<"application">>, <<"tei+xml">>, []}; +all_ext(<<"teicorpus">>) -> {<<"application">>, <<"tei+xml">>, []}; +all_ext(<<"tex">>) -> {<<"application">>, <<"x-tex">>, []}; +all_ext(<<"texi">>) -> {<<"application">>, <<"x-texinfo">>, []}; +all_ext(<<"texinfo">>) -> {<<"application">>, <<"x-texinfo">>, []}; +all_ext(<<"text">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"tfi">>) -> {<<"application">>, <<"thraud+xml">>, []}; +all_ext(<<"tfm">>) -> 
{<<"application">>, <<"x-tex-tfm">>, []}; +all_ext(<<"tga">>) -> {<<"image">>, <<"x-tga">>, []}; +all_ext(<<"thmx">>) -> {<<"application">>, <<"vnd.ms-officetheme">>, []}; +all_ext(<<"tiff">>) -> {<<"image">>, <<"tiff">>, []}; +all_ext(<<"tif">>) -> {<<"image">>, <<"tiff">>, []}; +all_ext(<<"tmo">>) -> {<<"application">>, <<"vnd.tmobile-livetv">>, []}; +all_ext(<<"torrent">>) -> {<<"application">>, <<"x-bittorrent">>, []}; +all_ext(<<"tpl">>) -> {<<"application">>, <<"vnd.groove-tool-template">>, []}; +all_ext(<<"tpt">>) -> {<<"application">>, <<"vnd.trid.tpt">>, []}; +all_ext(<<"tra">>) -> {<<"application">>, <<"vnd.trueapp">>, []}; +all_ext(<<"trm">>) -> {<<"application">>, <<"x-msterminal">>, []}; +all_ext(<<"tr">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"tsd">>) -> {<<"application">>, <<"timestamped-data">>, []}; +all_ext(<<"tsv">>) -> {<<"text">>, <<"tab-separated-values">>, []}; +all_ext(<<"ttc">>) -> {<<"application">>, <<"x-font-ttf">>, []}; +all_ext(<<"t">>) -> {<<"text">>, <<"troff">>, []}; +all_ext(<<"ttf">>) -> {<<"application">>, <<"x-font-ttf">>, []}; +all_ext(<<"ttl">>) -> {<<"text">>, <<"turtle">>, []}; +all_ext(<<"twd">>) -> {<<"application">>, <<"vnd.simtech-mindmapper">>, []}; +all_ext(<<"twds">>) -> {<<"application">>, <<"vnd.simtech-mindmapper">>, []}; +all_ext(<<"txd">>) -> {<<"application">>, <<"vnd.genomatix.tuxedo">>, []}; +all_ext(<<"txf">>) -> {<<"application">>, <<"vnd.mobius.txf">>, []}; +all_ext(<<"txt">>) -> {<<"text">>, <<"plain">>, []}; +all_ext(<<"u32">>) -> {<<"application">>, <<"x-authorware-bin">>, []}; +all_ext(<<"udeb">>) -> {<<"application">>, <<"x-debian-package">>, []}; +all_ext(<<"ufd">>) -> {<<"application">>, <<"vnd.ufdl">>, []}; +all_ext(<<"ufdl">>) -> {<<"application">>, <<"vnd.ufdl">>, []}; +all_ext(<<"ulx">>) -> {<<"application">>, <<"x-glulx">>, []}; +all_ext(<<"umj">>) -> {<<"application">>, <<"vnd.umajin">>, []}; +all_ext(<<"unityweb">>) -> {<<"application">>, <<"vnd.unity">>, []}; +all_ext(<<"uoml">>) -> 
{<<"application">>, <<"vnd.uoml+xml">>, []}; +all_ext(<<"uris">>) -> {<<"text">>, <<"uri-list">>, []}; +all_ext(<<"uri">>) -> {<<"text">>, <<"uri-list">>, []}; +all_ext(<<"urls">>) -> {<<"text">>, <<"uri-list">>, []}; +all_ext(<<"ustar">>) -> {<<"application">>, <<"x-ustar">>, []}; +all_ext(<<"utz">>) -> {<<"application">>, <<"vnd.uiq.theme">>, []}; +all_ext(<<"uu">>) -> {<<"text">>, <<"x-uuencode">>, []}; +all_ext(<<"uva">>) -> {<<"audio">>, <<"vnd.dece.audio">>, []}; +all_ext(<<"uvd">>) -> {<<"application">>, <<"vnd.dece.data">>, []}; +all_ext(<<"uvf">>) -> {<<"application">>, <<"vnd.dece.data">>, []}; +all_ext(<<"uvg">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []}; +all_ext(<<"uvh">>) -> {<<"video">>, <<"vnd.dece.hd">>, []}; +all_ext(<<"uvi">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []}; +all_ext(<<"uvm">>) -> {<<"video">>, <<"vnd.dece.mobile">>, []}; +all_ext(<<"uvp">>) -> {<<"video">>, <<"vnd.dece.pd">>, []}; +all_ext(<<"uvs">>) -> {<<"video">>, <<"vnd.dece.sd">>, []}; +all_ext(<<"uvt">>) -> {<<"application">>, <<"vnd.dece.ttml+xml">>, []}; +all_ext(<<"uvu">>) -> {<<"video">>, <<"vnd.uvvu.mp4">>, []}; +all_ext(<<"uvva">>) -> {<<"audio">>, <<"vnd.dece.audio">>, []}; +all_ext(<<"uvvd">>) -> {<<"application">>, <<"vnd.dece.data">>, []}; +all_ext(<<"uvvf">>) -> {<<"application">>, <<"vnd.dece.data">>, []}; +all_ext(<<"uvvg">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []}; +all_ext(<<"uvvh">>) -> {<<"video">>, <<"vnd.dece.hd">>, []}; +all_ext(<<"uvvi">>) -> {<<"image">>, <<"vnd.dece.graphic">>, []}; +all_ext(<<"uvvm">>) -> {<<"video">>, <<"vnd.dece.mobile">>, []}; +all_ext(<<"uvvp">>) -> {<<"video">>, <<"vnd.dece.pd">>, []}; +all_ext(<<"uvvs">>) -> {<<"video">>, <<"vnd.dece.sd">>, []}; +all_ext(<<"uvvt">>) -> {<<"application">>, <<"vnd.dece.ttml+xml">>, []}; +all_ext(<<"uvvu">>) -> {<<"video">>, <<"vnd.uvvu.mp4">>, []}; +all_ext(<<"uvv">>) -> {<<"video">>, <<"vnd.dece.video">>, []}; +all_ext(<<"uvvv">>) -> {<<"video">>, <<"vnd.dece.video">>, []}; 
+all_ext(<<"uvvx">>) -> {<<"application">>, <<"vnd.dece.unspecified">>, []}; +all_ext(<<"uvvz">>) -> {<<"application">>, <<"vnd.dece.zip">>, []}; +all_ext(<<"uvx">>) -> {<<"application">>, <<"vnd.dece.unspecified">>, []}; +all_ext(<<"uvz">>) -> {<<"application">>, <<"vnd.dece.zip">>, []}; +all_ext(<<"vcard">>) -> {<<"text">>, <<"vcard">>, []}; +all_ext(<<"vcd">>) -> {<<"application">>, <<"x-cdlink">>, []}; +all_ext(<<"vcf">>) -> {<<"text">>, <<"x-vcard">>, []}; +all_ext(<<"vcg">>) -> {<<"application">>, <<"vnd.groove-vcard">>, []}; +all_ext(<<"vcs">>) -> {<<"text">>, <<"x-vcalendar">>, []}; +all_ext(<<"vcx">>) -> {<<"application">>, <<"vnd.vcx">>, []}; +all_ext(<<"vis">>) -> {<<"application">>, <<"vnd.visionary">>, []}; +all_ext(<<"viv">>) -> {<<"video">>, <<"vnd.vivo">>, []}; +all_ext(<<"vob">>) -> {<<"video">>, <<"x-ms-vob">>, []}; +all_ext(<<"vor">>) -> {<<"application">>, <<"vnd.stardivision.writer">>, []}; +all_ext(<<"vox">>) -> {<<"application">>, <<"x-authorware-bin">>, []}; +all_ext(<<"vrml">>) -> {<<"model">>, <<"vrml">>, []}; +all_ext(<<"vsd">>) -> {<<"application">>, <<"vnd.visio">>, []}; +all_ext(<<"vsf">>) -> {<<"application">>, <<"vnd.vsf">>, []}; +all_ext(<<"vss">>) -> {<<"application">>, <<"vnd.visio">>, []}; +all_ext(<<"vst">>) -> {<<"application">>, <<"vnd.visio">>, []}; +all_ext(<<"vsw">>) -> {<<"application">>, <<"vnd.visio">>, []}; +all_ext(<<"vtu">>) -> {<<"model">>, <<"vnd.vtu">>, []}; +all_ext(<<"vxml">>) -> {<<"application">>, <<"voicexml+xml">>, []}; +all_ext(<<"w3d">>) -> {<<"application">>, <<"x-director">>, []}; +all_ext(<<"wad">>) -> {<<"application">>, <<"x-doom">>, []}; +all_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []}; +all_ext(<<"wax">>) -> {<<"audio">>, <<"x-ms-wax">>, []}; +all_ext(<<"wbmp">>) -> {<<"image">>, <<"vnd.wap.wbmp">>, []}; +all_ext(<<"wbs">>) -> {<<"application">>, <<"vnd.criticaltools.wbs+xml">>, []}; +all_ext(<<"wbxml">>) -> {<<"application">>, <<"vnd.wap.wbxml">>, []}; +all_ext(<<"wcm">>) -> {<<"application">>, 
<<"vnd.ms-works">>, []}; +all_ext(<<"wdb">>) -> {<<"application">>, <<"vnd.ms-works">>, []}; +all_ext(<<"wdp">>) -> {<<"image">>, <<"vnd.ms-photo">>, []}; +all_ext(<<"weba">>) -> {<<"audio">>, <<"webm">>, []}; +all_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []}; +all_ext(<<"webp">>) -> {<<"image">>, <<"webp">>, []}; +all_ext(<<"wg">>) -> {<<"application">>, <<"vnd.pmi.widget">>, []}; +all_ext(<<"wgt">>) -> {<<"application">>, <<"widget">>, []}; +all_ext(<<"wks">>) -> {<<"application">>, <<"vnd.ms-works">>, []}; +all_ext(<<"wma">>) -> {<<"audio">>, <<"x-ms-wma">>, []}; +all_ext(<<"wmd">>) -> {<<"application">>, <<"x-ms-wmd">>, []}; +all_ext(<<"wmf">>) -> {<<"application">>, <<"x-msmetafile">>, []}; +all_ext(<<"wmlc">>) -> {<<"application">>, <<"vnd.wap.wmlc">>, []}; +all_ext(<<"wmlsc">>) -> {<<"application">>, <<"vnd.wap.wmlscriptc">>, []}; +all_ext(<<"wmls">>) -> {<<"text">>, <<"vnd.wap.wmlscript">>, []}; +all_ext(<<"wml">>) -> {<<"text">>, <<"vnd.wap.wml">>, []}; +all_ext(<<"wm">>) -> {<<"video">>, <<"x-ms-wm">>, []}; +all_ext(<<"wmv">>) -> {<<"video">>, <<"x-ms-wmv">>, []}; +all_ext(<<"wmx">>) -> {<<"video">>, <<"x-ms-wmx">>, []}; +all_ext(<<"wmz">>) -> {<<"application">>, <<"x-msmetafile">>, []}; +all_ext(<<"woff">>) -> {<<"application">>, <<"font-woff">>, []}; +all_ext(<<"wpd">>) -> {<<"application">>, <<"vnd.wordperfect">>, []}; +all_ext(<<"wpl">>) -> {<<"application">>, <<"vnd.ms-wpl">>, []}; +all_ext(<<"wps">>) -> {<<"application">>, <<"vnd.ms-works">>, []}; +all_ext(<<"wqd">>) -> {<<"application">>, <<"vnd.wqd">>, []}; +all_ext(<<"wri">>) -> {<<"application">>, <<"x-mswrite">>, []}; +all_ext(<<"wrl">>) -> {<<"model">>, <<"vrml">>, []}; +all_ext(<<"wsdl">>) -> {<<"application">>, <<"wsdl+xml">>, []}; +all_ext(<<"wspolicy">>) -> {<<"application">>, <<"wspolicy+xml">>, []}; +all_ext(<<"wtb">>) -> {<<"application">>, <<"vnd.webturbo">>, []}; +all_ext(<<"wvx">>) -> {<<"video">>, <<"x-ms-wvx">>, []}; +all_ext(<<"x32">>) -> {<<"application">>, 
<<"x-authorware-bin">>, []}; +all_ext(<<"x3db">>) -> {<<"model">>, <<"x3d+binary">>, []}; +all_ext(<<"x3dbz">>) -> {<<"model">>, <<"x3d+binary">>, []}; +all_ext(<<"x3d">>) -> {<<"model">>, <<"x3d+xml">>, []}; +all_ext(<<"x3dv">>) -> {<<"model">>, <<"x3d+vrml">>, []}; +all_ext(<<"x3dvz">>) -> {<<"model">>, <<"x3d+vrml">>, []}; +all_ext(<<"x3dz">>) -> {<<"model">>, <<"x3d+xml">>, []}; +all_ext(<<"xaml">>) -> {<<"application">>, <<"xaml+xml">>, []}; +all_ext(<<"xap">>) -> {<<"application">>, <<"x-silverlight-app">>, []}; +all_ext(<<"xar">>) -> {<<"application">>, <<"vnd.xara">>, []}; +all_ext(<<"xbap">>) -> {<<"application">>, <<"x-ms-xbap">>, []}; +all_ext(<<"xbd">>) -> {<<"application">>, <<"vnd.fujixerox.docuworks.binder">>, []}; +all_ext(<<"xbm">>) -> {<<"image">>, <<"x-xbitmap">>, []}; +all_ext(<<"xdf">>) -> {<<"application">>, <<"xcap-diff+xml">>, []}; +all_ext(<<"xdm">>) -> {<<"application">>, <<"vnd.syncml.dm+xml">>, []}; +all_ext(<<"xdp">>) -> {<<"application">>, <<"vnd.adobe.xdp+xml">>, []}; +all_ext(<<"xdssc">>) -> {<<"application">>, <<"dssc+xml">>, []}; +all_ext(<<"xdw">>) -> {<<"application">>, <<"vnd.fujixerox.docuworks">>, []}; +all_ext(<<"xenc">>) -> {<<"application">>, <<"xenc+xml">>, []}; +all_ext(<<"xer">>) -> {<<"application">>, <<"patch-ops-error+xml">>, []}; +all_ext(<<"xfdf">>) -> {<<"application">>, <<"vnd.adobe.xfdf">>, []}; +all_ext(<<"xfdl">>) -> {<<"application">>, <<"vnd.xfdl">>, []}; +all_ext(<<"xht">>) -> {<<"application">>, <<"xhtml+xml">>, []}; +all_ext(<<"xhtml">>) -> {<<"application">>, <<"xhtml+xml">>, []}; +all_ext(<<"xhvml">>) -> {<<"application">>, <<"xv+xml">>, []}; +all_ext(<<"xif">>) -> {<<"image">>, <<"vnd.xiff">>, []}; +all_ext(<<"xla">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xlam">>) -> {<<"application">>, <<"vnd.ms-excel.addin.macroenabled.12">>, []}; +all_ext(<<"xlc">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xlf">>) -> {<<"application">>, <<"x-xliff+xml">>, []}; 
+all_ext(<<"xlm">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xls">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xlsb">>) -> {<<"application">>, <<"vnd.ms-excel.sheet.binary.macroenabled.12">>, []}; +all_ext(<<"xlsm">>) -> {<<"application">>, <<"vnd.ms-excel.sheet.macroenabled.12">>, []}; +all_ext(<<"xlsx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.spreadsheetml.sheet">>, []}; +all_ext(<<"xlt">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xltm">>) -> {<<"application">>, <<"vnd.ms-excel.template.macroenabled.12">>, []}; +all_ext(<<"xltx">>) -> {<<"application">>, <<"vnd.openxmlformats-officedocument.spreadsheetml.template">>, []}; +all_ext(<<"xlw">>) -> {<<"application">>, <<"vnd.ms-excel">>, []}; +all_ext(<<"xm">>) -> {<<"audio">>, <<"xm">>, []}; +all_ext(<<"xml">>) -> {<<"application">>, <<"xml">>, []}; +all_ext(<<"xo">>) -> {<<"application">>, <<"vnd.olpc-sugar">>, []}; +all_ext(<<"xop">>) -> {<<"application">>, <<"xop+xml">>, []}; +all_ext(<<"xpi">>) -> {<<"application">>, <<"x-xpinstall">>, []}; +all_ext(<<"xpl">>) -> {<<"application">>, <<"xproc+xml">>, []}; +all_ext(<<"xpm">>) -> {<<"image">>, <<"x-xpixmap">>, []}; +all_ext(<<"xpr">>) -> {<<"application">>, <<"vnd.is-xpr">>, []}; +all_ext(<<"xps">>) -> {<<"application">>, <<"vnd.ms-xpsdocument">>, []}; +all_ext(<<"xpw">>) -> {<<"application">>, <<"vnd.intercon.formnet">>, []}; +all_ext(<<"xpx">>) -> {<<"application">>, <<"vnd.intercon.formnet">>, []}; +all_ext(<<"xsl">>) -> {<<"application">>, <<"xml">>, []}; +all_ext(<<"xslt">>) -> {<<"application">>, <<"xslt+xml">>, []}; +all_ext(<<"xsm">>) -> {<<"application">>, <<"vnd.syncml+xml">>, []}; +all_ext(<<"xspf">>) -> {<<"application">>, <<"xspf+xml">>, []}; +all_ext(<<"xul">>) -> {<<"application">>, <<"vnd.mozilla.xul+xml">>, []}; +all_ext(<<"xvm">>) -> {<<"application">>, <<"xv+xml">>, []}; +all_ext(<<"xvml">>) -> {<<"application">>, <<"xv+xml">>, []}; +all_ext(<<"xwd">>) -> {<<"image">>, 
<<"x-xwindowdump">>, []}; +all_ext(<<"xyz">>) -> {<<"chemical">>, <<"x-xyz">>, []}; +all_ext(<<"xz">>) -> {<<"application">>, <<"x-xz">>, []}; +all_ext(<<"yang">>) -> {<<"application">>, <<"yang">>, []}; +all_ext(<<"yin">>) -> {<<"application">>, <<"yin+xml">>, []}; +all_ext(<<"z1">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z2">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z3">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z4">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z5">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z6">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z7">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"z8">>) -> {<<"application">>, <<"x-zmachine">>, []}; +all_ext(<<"zaz">>) -> {<<"application">>, <<"vnd.zzazz.deck+xml">>, []}; +all_ext(<<"zip">>) -> {<<"application">>, <<"zip">>, []}; +all_ext(<<"zir">>) -> {<<"application">>, <<"vnd.zul">>, []}; +all_ext(<<"zirz">>) -> {<<"application">>, <<"vnd.zul">>, []}; +all_ext(<<"zmm">>) -> {<<"application">>, <<"vnd.handheld-entertainment+xml">>, []}; +%% GENERATED +all_ext(_) -> {<<"application">>, <<"octet-stream">>, []}. 
+ +web_ext(<<"css">>) -> {<<"text">>, <<"css">>, []}; +web_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []}; +web_ext(<<"html">>) -> {<<"text">>, <<"html">>, []}; +web_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []}; +web_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []}; +web_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []}; +web_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []}; +web_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []}; +web_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []}; +web_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []}; +web_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []}; +web_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []}; +web_ext(<<"png">>) -> {<<"image">>, <<"png">>, []}; +web_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []}; +web_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []}; +web_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []}; +web_ext(_) -> {<<"application">>, <<"octet-stream">>, []}. diff --git a/rabbitmq-server/deps/cowlib/src/cow_mimetypes.erl.src b/rabbitmq-server/deps/cowlib/src/cow_mimetypes.erl.src new file mode 100644 index 0000000..cf79b5b --- /dev/null +++ b/rabbitmq-server/deps/cowlib/src/cow_mimetypes.erl.src @@ -0,0 +1,59 @@ +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +-module(cow_mimetypes). + +-export([all/1]). +-export([web/1]). + +%% @doc Return the mimetype for any file by looking at its extension. + +-spec all(file:filename_all()) -> {binary(), binary(), []}. +all(Path) -> + case filename:extension(Path) of + <<>> -> {<<"application">>, <<"octet-stream">>, []}; + << $., Ext/binary >> -> all_ext(Ext) + end. + +%% @doc Return the mimetype for a Web related file by looking at its extension. + +-spec web(file:filename_all()) -> {binary(), binary(), []}. +web(Path) -> + case filename:extension(Path) of + <<>> -> {<<"application">>, <<"octet-stream">>, []}; + << $., Ext/binary >> -> web_ext(Ext) + end. + +%% Internal. + +%% GENERATED +all_ext(_) -> {<<"application">>, <<"octet-stream">>, []}. + +web_ext(<<"css">>) -> {<<"text">>, <<"css">>, []}; +web_ext(<<"gif">>) -> {<<"image">>, <<"gif">>, []}; +web_ext(<<"html">>) -> {<<"text">>, <<"html">>, []}; +web_ext(<<"htm">>) -> {<<"text">>, <<"html">>, []}; +web_ext(<<"ico">>) -> {<<"image">>, <<"x-icon">>, []}; +web_ext(<<"jpeg">>) -> {<<"image">>, <<"jpeg">>, []}; +web_ext(<<"jpg">>) -> {<<"image">>, <<"jpeg">>, []}; +web_ext(<<"js">>) -> {<<"application">>, <<"javascript">>, []}; +web_ext(<<"mp3">>) -> {<<"audio">>, <<"mpeg">>, []}; +web_ext(<<"mp4">>) -> {<<"video">>, <<"mp4">>, []}; +web_ext(<<"ogg">>) -> {<<"audio">>, <<"ogg">>, []}; +web_ext(<<"ogv">>) -> {<<"video">>, <<"ogg">>, []}; +web_ext(<<"png">>) -> {<<"image">>, <<"png">>, []}; +web_ext(<<"svg">>) -> {<<"image">>, <<"svg+xml">>, []}; +web_ext(<<"wav">>) -> {<<"audio">>, <<"x-wav">>, []}; +web_ext(<<"webm">>) -> {<<"video">>, <<"webm">>, []}; +web_ext(_) -> {<<"application">>, <<"octet-stream">>, []}. 
diff --git a/rabbitmq-server/deps/cowlib/src/cow_multipart.erl b/rabbitmq-server/deps/cowlib/src/cow_multipart.erl new file mode 100644 index 0000000..d2b45a4 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/src/cow_multipart.erl @@ -0,0 +1,752 @@ +%% Copyright (c) 2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_multipart). + +%% Parsing. +-export([parse_headers/2]). +-export([parse_body/2]). + +%% Building. +-export([boundary/0]). +-export([first_part/2]). +-export([part/2]). +-export([close/1]). + +%% Headers. +-export([form_data/1]). +-export([parse_content_disposition/1]). +-export([parse_content_transfer_encoding/1]). +-export([parse_content_type/1]). + +-type headers() :: [{iodata(), iodata()}]. +-export_type([headers/0]). + +-include("cow_inline.hrl"). + +-define(TEST1_MIME, << + "This is a message with multiple parts in MIME format.\r\n" + "--frontier\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "This is the body of the message.\r\n" + "--frontier\r\n" + "Content-Type: application/octet-stream\r\n" + "Content-Transfer-Encoding: base64\r\n" + "\r\n" + "PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==\r\n" + "--frontier--" +>>). 
+-define(TEST1_BOUNDARY, <<"frontier">>). + +-define(TEST2_MIME, << + "--AaB03x\r\n" + "Content-Disposition: form-data; name=\"submit-name\"\r\n" + "\r\n" + "Larry\r\n" + "--AaB03x\r\n" + "Content-Disposition: form-data; name=\"files\"\r\n" + "Content-Type: multipart/mixed; boundary=BbC04y\r\n" + "\r\n" + "--BbC04y\r\n" + "Content-Disposition: file; filename=\"file1.txt\"\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "... contents of file1.txt ...\r\n" + "--BbC04y\r\n" + "Content-Disposition: file; filename=\"file2.gif\"\r\n" + "Content-Type: image/gif\r\n" + "Content-Transfer-Encoding: binary\r\n" + "\r\n" + "...contents of file2.gif...\r\n" + "--BbC04y--\r\n" + "--AaB03x--" +>>). +-define(TEST2_BOUNDARY, <<"AaB03x">>). + +-define(TEST3_MIME, << + "This is the preamble.\r\n" + "--boundary\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "This is the body of the message.\r\n" + "--boundary--" + "\r\nThis is the epilogue. Here it includes leading CRLF" +>>). +-define(TEST3_BOUNDARY, <<"boundary">>). + +-define(TEST4_MIME, << + "This is the preamble.\r\n" + "--boundary\r\n" + "Content-Type: text/plain\r\n" + "\r\n" + "This is the body of the message.\r\n" + "--boundary--" + "\r\n" +>>). +-define(TEST4_BOUNDARY, <<"boundary">>). + +%% Parsing. +%% +%% The multipart format is defined in RFC 2045. + +%% @doc Parse the headers for the next multipart part. +%% +%% This function skips any preamble before the boundary. +%% The preamble may be retrieved using parse_body/2. +%% +%% This function will accept input of any size, it is +%% up to the caller to limit it if needed. + +-spec parse_headers(binary(), binary()) + -> more | {more, binary()} + | {ok, headers(), binary()} + | {done, binary()}. +%% If the stream starts with the boundary we can make a few assumptions +%% and quickly figure out if we got the complete list of headers. +parse_headers(<< "--", Stream/bits >>, Boundary) -> + BoundarySize = byte_size(Boundary), + case Stream of + %% Last boundary. 
Return the epilogue. + << Boundary:BoundarySize/binary, "--", Stream2/bits >> -> + {done, Stream2}; + << Boundary:BoundarySize/binary, Stream2/bits >> -> + %% We have all the headers only if there is a \r\n\r\n + %% somewhere in the data after the boundary. + case binary:match(Stream2, <<"\r\n\r\n">>) of + nomatch -> + more; + _ -> + before_parse_headers(Stream2) + end; + %% If there isn't enough to represent Boundary \r\n\r\n + %% then we definitely don't have all the headers. + _ when byte_size(Stream) < byte_size(Boundary) + 4 -> + more; + %% Otherwise we have preamble data to skip. + %% We still got rid of the first two misleading bytes. + _ -> + skip_preamble(Stream, Boundary) + end; +%% Otherwise we have preamble data to skip. +parse_headers(Stream, Boundary) -> + skip_preamble(Stream, Boundary). + +%% We need to find the boundary and a \r\n\r\n after that. +%% Since the boundary isn't at the start, it must be right +%% after a \r\n too. +skip_preamble(Stream, Boundary) -> + case binary:match(Stream, <<"\r\n--", Boundary/bits >>) of + %% No boundary, need more data. + nomatch -> + %% We can safely skip the size of the stream + %% minus the last 3 bytes which may be a partial boundary. + SkipSize = byte_size(Stream) - 3, + case SkipSize > 0 of + false -> + more; + true -> + << _:SkipSize/binary, Stream2/bits >> = Stream, + {more, Stream2} + end; + {Start, Length} -> + Start2 = Start + Length, + << _:Start2/binary, Stream2/bits >> = Stream, + case Stream2 of + %% Last boundary. Return the epilogue. + << "--", Stream3/bits >> -> + {done, Stream3}; + _ -> + case binary:match(Stream, <<"\r\n\r\n">>) of + %% We don't have the full headers. + nomatch -> + {more, Stream2}; + _ -> + before_parse_headers(Stream2) + end + end + end. + +%% There is a line break right after the boundary, skip it. +%% +%% We only skip it now because there might be no headers at all, +%% which means the \r\n\r\n indicating the end of headers also +%% includes this line break. 
+before_parse_headers(<< "\r\n", Stream/bits >>) -> + parse_hd_name(Stream, [], <<>>). + +parse_hd_name(<< C, Rest/bits >>, H, SoFar) -> + case C of + $: -> parse_hd_before_value(Rest, H, SoFar); + $\s -> parse_hd_name_ws(Rest, H, SoFar); + $\t -> parse_hd_name_ws(Rest, H, SoFar); + ?INLINE_LOWERCASE(parse_hd_name, Rest, H, SoFar) + end. + +parse_hd_name_ws(<< C, Rest/bits >>, H, Name) -> + case C of + $\s -> parse_hd_name_ws(Rest, H, Name); + $\t -> parse_hd_name_ws(Rest, H, Name); + $: -> parse_hd_before_value(Rest, H, Name) + end. + +parse_hd_before_value(<< $\s, Rest/bits >>, H, N) -> + parse_hd_before_value(Rest, H, N); +parse_hd_before_value(<< $\t, Rest/bits >>, H, N) -> + parse_hd_before_value(Rest, H, N); +parse_hd_before_value(Buffer, H, N) -> + parse_hd_value(Buffer, H, N, <<>>). + +parse_hd_value(<< $\r, Rest/bits >>, Headers, Name, SoFar) -> + case Rest of + << "\n\r\n", Rest2/bits >> -> + {ok, [{Name, SoFar}|Headers], Rest2}; + << $\n, C, Rest2/bits >> when C =:= $\s; C =:= $\t -> + parse_hd_value(Rest2, Headers, Name, SoFar); + << $\n, Rest2/bits >> -> + parse_hd_name(Rest2, [{Name, SoFar}|Headers], <<>>) + end; +parse_hd_value(<< C, Rest/bits >>, H, N, SoFar) -> + parse_hd_value(Rest, H, N, << SoFar/binary, C >>). + +%% @doc Parse the body of the current multipart part. +%% +%% The body is everything until the next boundary. + +-spec parse_body(binary(), binary()) + -> {ok, binary()} | {ok, binary(), binary()} + | done | {done, binary()} | {done, binary(), binary()}. +parse_body(Stream, Boundary) -> + BoundarySize = byte_size(Boundary), + case Stream of + << "--", Boundary:BoundarySize/binary, _/bits >> -> + done; + _ -> + case binary:match(Stream, << "\r\n--", Boundary/bits >>) of + %% No boundary, check for a possible partial at the end. + %% Return more or less of the body depending on the result. 
+ nomatch -> + StreamSize = byte_size(Stream), + From = StreamSize - BoundarySize - 3, + MatchOpts = if + %% Binary too small to contain boundary, check it fully. + From < 0 -> []; + %% Optimize, only check the end of the binary. + true -> [{scope, {From, StreamSize - From}}] + end, + case binary:match(Stream, <<"\r">>, MatchOpts) of + nomatch -> + {ok, Stream}; + {Pos, _} -> + case Stream of + << Body:Pos/binary >> -> + {ok, Body}; + << Body:Pos/binary, Rest/bits >> -> + {ok, Body, Rest} + end + end; + %% Boundary found, this is the last chunk of the body. + {Pos, _} -> + case Stream of + << Body:Pos/binary, "\r\n" >> -> + {done, Body}; + << Body:Pos/binary, "\r\n", Rest/bits >> -> + {done, Body, Rest}; + << Body:Pos/binary, Rest/bits >> -> + {done, Body, Rest} + end + end + end. + +-ifdef(TEST). +parse_test() -> + H1 = [{<<"content-type">>, <<"text/plain">>}], + Body1 = <<"This is the body of the message.">>, + H2 = lists:sort([{<<"content-type">>, <<"application/octet-stream">>}, + {<<"content-transfer-encoding">>, <<"base64">>}]), + Body2 = <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>, + {ok, H1, Rest} = parse_headers(?TEST1_MIME, ?TEST1_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST1_BOUNDARY), + done = parse_body(Rest2, ?TEST1_BOUNDARY), + {ok, H2Unsorted, Rest3} = parse_headers(Rest2, ?TEST1_BOUNDARY), + H2 = lists:sort(H2Unsorted), + {done, Body2, Rest4} = parse_body(Rest3, ?TEST1_BOUNDARY), + done = parse_body(Rest4, ?TEST1_BOUNDARY), + {done, <<>>} = parse_headers(Rest4, ?TEST1_BOUNDARY), + ok. 
+ +parse_interleaved_test() -> + H1 = [{<<"content-disposition">>, <<"form-data; name=\"submit-name\"">>}], + Body1 = <<"Larry">>, + H2 = lists:sort([{<<"content-disposition">>, <<"form-data; name=\"files\"">>}, + {<<"content-type">>, <<"multipart/mixed; boundary=BbC04y">>}]), + InH1 = lists:sort([{<<"content-disposition">>, <<"file; filename=\"file1.txt\"">>}, + {<<"content-type">>, <<"text/plain">>}]), + InBody1 = <<"... contents of file1.txt ...">>, + InH2 = lists:sort([{<<"content-disposition">>, <<"file; filename=\"file2.gif\"">>}, + {<<"content-type">>, <<"image/gif">>}, + {<<"content-transfer-encoding">>, <<"binary">>}]), + InBody2 = <<"...contents of file2.gif...">>, + {ok, H1, Rest} = parse_headers(?TEST2_MIME, ?TEST2_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST2_BOUNDARY), + done = parse_body(Rest2, ?TEST2_BOUNDARY), + {ok, H2Unsorted, Rest3} = parse_headers(Rest2, ?TEST2_BOUNDARY), + H2 = lists:sort(H2Unsorted), + {_, ContentType} = lists:keyfind(<<"content-type">>, 1, H2), + {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, InBoundary}]} + = parse_content_type(ContentType), + {ok, InH1Unsorted, InRest} = parse_headers(Rest3, InBoundary), + InH1 = lists:sort(InH1Unsorted), + {done, InBody1, InRest2} = parse_body(InRest, InBoundary), + done = parse_body(InRest2, InBoundary), + {ok, InH2Unsorted, InRest3} = parse_headers(InRest2, InBoundary), + InH2 = lists:sort(InH2Unsorted), + {done, InBody2, InRest4} = parse_body(InRest3, InBoundary), + done = parse_body(InRest4, InBoundary), + {done, Rest4} = parse_headers(InRest4, InBoundary), + {done, <<>>} = parse_headers(Rest4, ?TEST2_BOUNDARY), + ok. + +parse_epilogue_test() -> + H1 = [{<<"content-type">>, <<"text/plain">>}], + Body1 = <<"This is the body of the message.">>, + Epilogue = <<"\r\nThis is the epilogue. 
Here it includes leading CRLF">>, + {ok, H1, Rest} = parse_headers(?TEST3_MIME, ?TEST3_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST3_BOUNDARY), + done = parse_body(Rest2, ?TEST3_BOUNDARY), + {done, Epilogue} = parse_headers(Rest2, ?TEST3_BOUNDARY), + ok. + +parse_epilogue_crlf_test() -> + H1 = [{<<"content-type">>, <<"text/plain">>}], + Body1 = <<"This is the body of the message.">>, + Epilogue = <<"\r\n">>, + {ok, H1, Rest} = parse_headers(?TEST4_MIME, ?TEST4_BOUNDARY), + {done, Body1, Rest2} = parse_body(Rest, ?TEST4_BOUNDARY), + done = parse_body(Rest2, ?TEST4_BOUNDARY), + {done, Epilogue} = parse_headers(Rest2, ?TEST4_BOUNDARY), + ok. + +parse_partial_test() -> + {ok, <<0:8000, "abcdef">>, <<"\rghij">>} + = parse_body(<<0:8000, "abcdef\rghij">>, <<"boundary">>), + {ok, <<"abcdef">>, <<"\rghij">>} + = parse_body(<<"abcdef\rghij">>, <<"boundary">>), + {ok, <<"abc">>, <<"\rdef">>} + = parse_body(<<"abc\rdef">>, <<"boundaryboundary">>), + {ok, <<0:8000, "abcdef">>, <<"\r\nghij">>} + = parse_body(<<0:8000, "abcdef\r\nghij">>, <<"boundary">>), + {ok, <<"abcdef">>, <<"\r\nghij">>} + = parse_body(<<"abcdef\r\nghij">>, <<"boundary">>), + {ok, <<"abc">>, <<"\r\ndef">>} + = parse_body(<<"abc\r\ndef">>, <<"boundaryboundary">>), + {ok, <<"boundary">>, <<"\r">>} + = parse_body(<<"boundary\r">>, <<"boundary">>), + {ok, <<"boundary">>, <<"\r\n">>} + = parse_body(<<"boundary\r\n">>, <<"boundary">>), + {ok, <<"boundary">>, <<"\r\n-">>} + = parse_body(<<"boundary\r\n-">>, <<"boundary">>), + {ok, <<"boundary">>, <<"\r\n--">>} + = parse_body(<<"boundary\r\n--">>, <<"boundary">>), + ok. +-endif. + +-ifdef(PERF). +perf_parse_multipart(Stream, Boundary) -> + case parse_headers(Stream, Boundary) of + {ok, _, Rest} -> + {_, _, Rest2} = parse_body(Rest, Boundary), + perf_parse_multipart(Rest2, Boundary); + {done, _} -> + ok + end. + +horse_parse() -> + horse:repeat(50000, + perf_parse_multipart(?TEST1_MIME, ?TEST1_BOUNDARY) + ). +-endif. + +%% Building. 
+ +%% @doc Generate a new random boundary. +%% +%% The boundary generated has a low probability of ever appearing +%% in the data. + +-spec boundary() -> binary(). +boundary() -> + base64:encode(crypto:rand_bytes(48)). + +%% @doc Return the first part's head. +%% +%% This works exactly like the part/2 function except there is +%% no leading \r\n. It's not required to use this function, +%% just makes the output a little smaller and prettier. + +-spec first_part(binary(), headers()) -> iodata(). +first_part(Boundary, Headers) -> + [<<"--">>, Boundary, <<"\r\n">>, headers_to_iolist(Headers, [])]. + +%% @doc Return a part's head. + +-spec part(binary(), headers()) -> iodata(). +part(Boundary, Headers) -> + [<<"\r\n--">>, Boundary, <<"\r\n">>, headers_to_iolist(Headers, [])]. + +headers_to_iolist([], Acc) -> + lists:reverse([<<"\r\n">>|Acc]); +headers_to_iolist([{N, V}|Tail], Acc) -> + %% We don't want to create a sublist so we list the + %% values in reverse order so that it gets reversed properly. + headers_to_iolist(Tail, [<<"\r\n">>, V, <<": ">>, N|Acc]). + +%% @doc Return the closing delimiter of the multipart message. + +-spec close(binary()) -> iodata(). +close(Boundary) -> + [<<"\r\n--">>, Boundary, <<"--">>]. + +-ifdef(TEST). +build_test() -> + Result = string:to_lower(binary_to_list(?TEST1_MIME)), + Result = string:to_lower(binary_to_list(iolist_to_binary([ + <<"This is a message with multiple parts in MIME format.\r\n">>, + first_part(?TEST1_BOUNDARY, [{<<"content-type">>, <<"text/plain">>}]), + <<"This is the body of the message.">>, + part(?TEST1_BOUNDARY, [ + {<<"content-type">>, <<"application/octet-stream">>}, + {<<"content-transfer-encoding">>, <<"base64">>}]), + <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>, + close(?TEST1_BOUNDARY) + ]))), + ok. 
+ +identity_test() -> + B = boundary(), + Preamble = <<"This is a message with multiple parts in MIME format.">>, + H1 = [{<<"content-type">>, <<"text/plain">>}], + Body1 = <<"This is the body of the message.">>, + H2 = lists:sort([{<<"content-type">>, <<"application/octet-stream">>}, + {<<"content-transfer-encoding">>, <<"base64">>}]), + Body2 = <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>, + Epilogue = <<"Gotta go fast!">>, + M = iolist_to_binary([ + Preamble, + part(B, H1), Body1, + part(B, H2), Body2, + close(B), + Epilogue + ]), + {done, Preamble, M2} = parse_body(M, B), + {ok, H1, M3} = parse_headers(M2, B), + {done, Body1, M4} = parse_body(M3, B), + {ok, H2Unsorted, M5} = parse_headers(M4, B), + H2 = lists:sort(H2Unsorted), + {done, Body2, M6} = parse_body(M5, B), + {done, Epilogue} = parse_headers(M6, B), + ok. +-endif. + +-ifdef(PERF). +perf_build_multipart() -> + B = boundary(), + [ + <<"preamble\r\n">>, + first_part(B, [{<<"content-type">>, <<"text/plain">>}]), + <<"This is the body of the message.">>, + part(B, [ + {<<"content-type">>, <<"application/octet-stream">>}, + {<<"content-transfer-encoding">>, <<"base64">>}]), + <<"PGh0bWw+CiAgPGhlYWQ+CiAgPC9oZWFkPgogIDxib2R5PgogICAgPHA+VGhpcyBpcyB0aGUg\r\n" + "Ym9keSBvZiB0aGUgbWVzc2FnZS48L3A+CiAgPC9ib2R5Pgo8L2h0bWw+Cg==">>, + close(B), + <<"epilogue">> + ]. + +horse_build() -> + horse:repeat(50000, + perf_build_multipart() + ). +-endif. + +%% Headers. + +%% @doc Convenience function for extracting information from headers +%% when parsing a multipart/form-data stream. + +-spec form_data(headers()) + -> {data, binary()} + | {file, binary(), binary(), binary(), binary()}. 
+form_data(Headers) -> + {_, DispositionBin} = lists:keyfind(<<"content-disposition">>, 1, Headers), + {<<"form-data">>, Params} = parse_content_disposition(DispositionBin), + {_, FieldName} = lists:keyfind(<<"name">>, 1, Params), + case lists:keyfind(<<"filename">>, 1, Params) of + false -> + {data, FieldName}; + {_, Filename} -> + Type = case lists:keyfind(<<"content-type">>, 1, Headers) of + false -> <<"text/plain">>; + {_, T} -> T + end, + TransferEncoding = case lists:keyfind( + <<"content-transfer-encoding">>, 1, Headers) of + false -> <<"7bit">>; + {_, TE} -> TE + end, + {file, FieldName, Filename, Type, TransferEncoding} + end. + +-ifdef(TEST). +form_data_test_() -> + Tests = [ + {[{<<"content-disposition">>, <<"form-data; name=\"submit-name\"">>}], + {data, <<"submit-name">>}}, + {[{<<"content-disposition">>, + <<"form-data; name=\"files\"; filename=\"file1.txt\"">>}, + {<<"content-type">>, <<"text/x-plain">>}], + {file, <<"files">>, <<"file1.txt">>, + <<"text/x-plain">>, <<"7bit">>}} + ], + [{lists:flatten(io_lib:format("~p", [V])), + fun() -> R = form_data(V) end} || {V, R} <- Tests]. +-endif. + +%% @todo parse_content_description +%% @todo parse_content_id + +%% @doc Parse an RFC 2183 content-disposition value. +%% @todo Support RFC 2231. + +-spec parse_content_disposition(binary()) + -> {binary(), [{binary(), binary()}]}. +parse_content_disposition(Bin) -> + parse_cd_type(Bin, <<>>). + +parse_cd_type(<<>>, Acc) -> + {Acc, []}; +parse_cd_type(<< C, Rest/bits >>, Acc) -> + case C of + $; -> {Acc, parse_before_param(Rest, [])}; + $\s -> {Acc, parse_before_param(Rest, [])}; + $\t -> {Acc, parse_before_param(Rest, [])}; + ?INLINE_LOWERCASE(parse_cd_type, Rest, Acc) + end. + +-ifdef(TEST). 
+parse_content_disposition_test_() -> + Tests = [ + {<<"inline">>, {<<"inline">>, []}}, + {<<"attachment">>, {<<"attachment">>, []}}, + {<<"attachment; filename=genome.jpeg;" + " modification-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";">>, + {<<"attachment">>, [ + {<<"filename">>, <<"genome.jpeg">>}, + {<<"modification-date">>, <<"Wed, 12 Feb 1997 16:29:51 -0500">>} + ]}}, + {<<"form-data; name=\"user\"">>, + {<<"form-data">>, [{<<"name">>, <<"user">>}]}}, + {<<"form-data; NAME=\"submit-name\"">>, + {<<"form-data">>, [{<<"name">>, <<"submit-name">>}]}}, + {<<"form-data; name=\"files\"; filename=\"file1.txt\"">>, + {<<"form-data">>, [ + {<<"name">>, <<"files">>}, + {<<"filename">>, <<"file1.txt">>} + ]}}, + {<<"file; filename=\"file1.txt\"">>, + {<<"file">>, [{<<"filename">>, <<"file1.txt">>}]}}, + {<<"file; filename=\"file2.gif\"">>, + {<<"file">>, [{<<"filename">>, <<"file2.gif">>}]}} + ], + [{V, fun() -> R = parse_content_disposition(V) end} || {V, R} <- Tests]. +-endif. + +-ifdef(PERF). +horse_parse_content_disposition_attachment() -> + horse:repeat(100000, + parse_content_disposition(<<"attachment; filename=genome.jpeg;" + " modification-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";">>) + ). + +horse_parse_content_disposition_form_data() -> + horse:repeat(100000, + parse_content_disposition( + <<"form-data; name=\"files\"; filename=\"file1.txt\"">>) + ). + +horse_parse_content_disposition_inline() -> + horse:repeat(100000, + parse_content_disposition(<<"inline">>) + ). +-endif. + +%% @doc Parse an RFC 2045 content-transfer-encoding header. + +-spec parse_content_transfer_encoding(binary()) -> binary(). +parse_content_transfer_encoding(Bin) -> + ?INLINE_LOWERCASE_BC(Bin). + +-ifdef(TEST). 
+parse_content_transfer_encoding_test_() -> + Tests = [ + {<<"7bit">>, <<"7bit">>}, + {<<"7BIT">>, <<"7bit">>}, + {<<"8bit">>, <<"8bit">>}, + {<<"binary">>, <<"binary">>}, + {<<"quoted-printable">>, <<"quoted-printable">>}, + {<<"base64">>, <<"base64">>}, + {<<"Base64">>, <<"base64">>}, + {<<"BASE64">>, <<"base64">>}, + {<<"bAsE64">>, <<"base64">>} + ], + [{V, fun() -> R = parse_content_transfer_encoding(V) end} + || {V, R} <- Tests]. +-endif. + +-ifdef(PERF). +horse_parse_content_transfer_encoding() -> + horse:repeat(100000, + parse_content_transfer_encoding(<<"QUOTED-PRINTABLE">>) + ). +-endif. + +%% @doc Parse an RFC 2045 content-type header. + +-spec parse_content_type(binary()) + -> {binary(), binary(), [{binary(), binary()}]}. +parse_content_type(Bin) -> + parse_ct_type(Bin, <<>>). + +parse_ct_type(<< C, Rest/bits >>, Acc) -> + case C of + $/ -> parse_ct_subtype(Rest, Acc, <<>>); + ?INLINE_LOWERCASE(parse_ct_type, Rest, Acc) + end. + +parse_ct_subtype(<<>>, Type, Subtype) when Subtype =/= <<>> -> + {Type, Subtype, []}; +parse_ct_subtype(<< C, Rest/bits >>, Type, Acc) -> + case C of + $; -> {Type, Acc, parse_before_param(Rest, [])}; + $\s -> {Type, Acc, parse_before_param(Rest, [])}; + $\t -> {Type, Acc, parse_before_param(Rest, [])}; + ?INLINE_LOWERCASE(parse_ct_subtype, Rest, Type, Acc) + end. + +-ifdef(TEST). 
+parse_content_type_test_() -> + Tests = [ + {<<"image/gif">>, + {<<"image">>, <<"gif">>, []}}, + {<<"text/plain">>, + {<<"text">>, <<"plain">>, []}}, + {<<"text/plain; charset=us-ascii">>, + {<<"text">>, <<"plain">>, [{<<"charset">>, <<"us-ascii">>}]}}, + {<<"text/plain; charset=\"us-ascii\"">>, + {<<"text">>, <<"plain">>, [{<<"charset">>, <<"us-ascii">>}]}}, + {<<"multipart/form-data; boundary=AaB03x">>, + {<<"multipart">>, <<"form-data">>, + [{<<"boundary">>, <<"AaB03x">>}]}}, + {<<"multipart/mixed; boundary=BbC04y">>, + {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, <<"BbC04y">>}]}}, + {<<"multipart/mixed; boundary=--------">>, + {<<"multipart">>, <<"mixed">>, [{<<"boundary">>, <<"--------">>}]}}, + {<<"application/x-horse; filename=genome.jpeg;" + " some-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";" + " charset=us-ascii; empty=; number=12345">>, + {<<"application">>, <<"x-horse">>, [ + {<<"filename">>, <<"genome.jpeg">>}, + {<<"some-date">>, <<"Wed, 12 Feb 1997 16:29:51 -0500">>}, + {<<"charset">>, <<"us-ascii">>}, + {<<"empty">>, <<>>}, + {<<"number">>, <<"12345">>} + ]}} + ], + [{V, fun() -> R = parse_content_type(V) end} + || {V, R} <- Tests]. +-endif. + +-ifdef(PERF). +horse_parse_content_type_zero() -> + horse:repeat(100000, + parse_content_type(<<"text/plain">>) + ). + +horse_parse_content_type_one() -> + horse:repeat(100000, + parse_content_type(<<"text/plain; charset=\"us-ascii\"">>) + ). + +horse_parse_content_type_five() -> + horse:repeat(100000, + parse_content_type(<<"application/x-horse; filename=genome.jpeg;" + " some-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";" + " charset=us-ascii; empty=; number=12345">>) + ). +-endif. + +%% @doc Parse RFC 2045 parameters. 
+ +parse_before_param(<<>>, Params) -> + lists:reverse(Params); +parse_before_param(<< C, Rest/bits >>, Params) -> + case C of + $; -> parse_before_param(Rest, Params); + $\s -> parse_before_param(Rest, Params); + $\t -> parse_before_param(Rest, Params); + ?INLINE_LOWERCASE(parse_param_name, Rest, Params, <<>>) + end. + +parse_param_name(<<>>, Params, Acc) -> + lists:reverse([{Acc, <<>>}|Params]); +parse_param_name(<< C, Rest/bits >>, Params, Acc) -> + case C of + $= -> parse_param_value(Rest, Params, Acc); + ?INLINE_LOWERCASE(parse_param_name, Rest, Params, Acc) + end. + +parse_param_value(<<>>, Params, Name) -> + lists:reverse([{Name, <<>>}|Params]); +parse_param_value(<< C, Rest/bits >>, Params, Name) -> + case C of + $" -> parse_param_quoted_value(Rest, Params, Name, <<>>); + $; -> parse_before_param(Rest, [{Name, <<>>}|Params]); + $\s -> parse_before_param(Rest, [{Name, <<>>}|Params]); + $\t -> parse_before_param(Rest, [{Name, <<>>}|Params]); + C -> parse_param_value(Rest, Params, Name, << C >>) + end. + +parse_param_value(<<>>, Params, Name, Acc) -> + lists:reverse([{Name, Acc}|Params]); +parse_param_value(<< C, Rest/bits >>, Params, Name, Acc) -> + case C of + $; -> parse_before_param(Rest, [{Name, Acc}|Params]); + $\s -> parse_before_param(Rest, [{Name, Acc}|Params]); + $\t -> parse_before_param(Rest, [{Name, Acc}|Params]); + C -> parse_param_value(Rest, Params, Name, << Acc/binary, C >>) + end. + +%% We expect a final $" so no need to test for <<>>. +parse_param_quoted_value(<< $\\, C, Rest/bits >>, Params, Name, Acc) -> + parse_param_quoted_value(Rest, Params, Name, << Acc/binary, C >>); +parse_param_quoted_value(<< $", Rest/bits >>, Params, Name, Acc) -> + parse_before_param(Rest, [{Name, Acc}|Params]); +parse_param_quoted_value(<< C, Rest/bits >>, Params, Name, Acc) + when C =/= $\r -> + parse_param_quoted_value(Rest, Params, Name, << Acc/binary, C >>). 
diff --git a/rabbitmq-server/deps/cowlib/src/cow_qs.erl b/rabbitmq-server/deps/cowlib/src/cow_qs.erl new file mode 100644 index 0000000..413562b --- /dev/null +++ b/rabbitmq-server/deps/cowlib/src/cow_qs.erl @@ -0,0 +1,571 @@ +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_qs). + +-export([parse_qs/1]). +-export([qs/1]). +-export([urldecode/1]). +-export([urlencode/1]). + +-type qs_vals() :: [{binary(), binary() | true}]. + +%% @doc Parse an application/x-www-form-urlencoded string. +%% +%% The percent decoding is inlined to greatly improve the performance +%% by avoiding copying binaries twice (once for extracting, once for +%% decoding) instead of just extracting the proper representation. + +-spec parse_qs(binary()) -> qs_vals(). +parse_qs(B) -> + parse_qs_name(B, [], <<>>). 
+ +parse_qs_name(<< $%, H, L, Rest/bits >>, Acc, Name) -> + C = (unhex(H) bsl 4 bor unhex(L)), + parse_qs_name(Rest, Acc, << Name/bits, C >>); +parse_qs_name(<< $+, Rest/bits >>, Acc, Name) -> + parse_qs_name(Rest, Acc, << Name/bits, " " >>); +parse_qs_name(<< $=, Rest/bits >>, Acc, Name) when Name =/= <<>> -> + parse_qs_value(Rest, Acc, Name, <<>>); +parse_qs_name(<< $&, Rest/bits >>, Acc, Name) -> + case Name of + <<>> -> parse_qs_name(Rest, Acc, <<>>); + _ -> parse_qs_name(Rest, [{Name, true}|Acc], <<>>) + end; +parse_qs_name(<< C, Rest/bits >>, Acc, Name) when C =/= $%, C =/= $= -> + parse_qs_name(Rest, Acc, << Name/bits, C >>); +parse_qs_name(<<>>, Acc, Name) -> + case Name of + <<>> -> lists:reverse(Acc); + _ -> lists:reverse([{Name, true}|Acc]) + end. + +parse_qs_value(<< $%, H, L, Rest/bits >>, Acc, Name, Value) -> + C = (unhex(H) bsl 4 bor unhex(L)), + parse_qs_value(Rest, Acc, Name, << Value/bits, C >>); +parse_qs_value(<< $+, Rest/bits >>, Acc, Name, Value) -> + parse_qs_value(Rest, Acc, Name, << Value/bits, " " >>); +parse_qs_value(<< $&, Rest/bits >>, Acc, Name, Value) -> + parse_qs_name(Rest, [{Name, Value}|Acc], <<>>); +parse_qs_value(<< C, Rest/bits >>, Acc, Name, Value) when C =/= $% -> + parse_qs_value(Rest, Acc, Name, << Value/bits, C >>); +parse_qs_value(<<>>, Acc, Name, Value) -> + lists:reverse([{Name, Value}|Acc]). + +-ifdef(TEST). 
+parse_qs_test_() -> + Tests = [ + {<<>>, []}, + {<<"&">>, []}, + {<<"a">>, [{<<"a">>, true}]}, + {<<"a&">>, [{<<"a">>, true}]}, + {<<"&a">>, [{<<"a">>, true}]}, + {<<"a&b">>, [{<<"a">>, true}, {<<"b">>, true}]}, + {<<"a&&b">>, [{<<"a">>, true}, {<<"b">>, true}]}, + {<<"a&b&">>, [{<<"a">>, true}, {<<"b">>, true}]}, + {<<"=">>, error}, + {<<"=b">>, error}, + {<<"a=">>, [{<<"a">>, <<>>}]}, + {<<"a=b">>, [{<<"a">>, <<"b">>}]}, + {<<"a=&b=">>, [{<<"a">>, <<>>}, {<<"b">>, <<>>}]}, + {<<"a=b&c&d=e">>, [{<<"a">>, <<"b">>}, + {<<"c">>, true}, {<<"d">>, <<"e">>}]}, + {<<"a=b=c&d=e=f&g=h=i">>, [{<<"a">>, <<"b=c">>}, + {<<"d">>, <<"e=f">>}, {<<"g">>, <<"h=i">>}]}, + {<<"+">>, [{<<" ">>, true}]}, + {<<"+=+">>, [{<<" ">>, <<" ">>}]}, + {<<"a+b=c+d">>, [{<<"a b">>, <<"c d">>}]}, + {<<"+a+=+b+&+c+=+d+">>, [{<<" a ">>, <<" b ">>}, + {<<" c ">>, <<" d ">>}]}, + {<<"a%20b=c%20d">>, [{<<"a b">>, <<"c d">>}]}, + {<<"%25%26%3D=%25%26%3D&_-.=.-_">>, [{<<"%&=">>, <<"%&=">>}, + {<<"_-.">>, <<".-_">>}]}, + {<<"for=extend%2Franch">>, [{<<"for">>, <<"extend/ranch">>}]} + ], + [{Qs, fun() -> + E = try parse_qs(Qs) of + R -> R + catch _:_ -> + error + end + end} || {Qs, E} <- Tests]. + +parse_qs_identity_test_() -> + Tests = [ + <<"+">>, + <<"hl=en&q=erlang+cowboy">>, + <<"direction=desc&for=extend%2Franch&sort=updated&state=open">>, + <<"i=EWiIXmPj5gl6&v=QowBp0oDLQXdd4x_GwiywA&ip=98.20.31.81&" + "la=en&pg=New8.undertonebrandsafe.com%2F698a2525065ee2" + "60c0b2f2aaad89ab82&re=&sz=1&fc=1&fr=140&br=3&bv=11.0." + "696.16&os=3&ov=&rs=vpl&k=cookies%7Csale%7Cbrowser%7Cm" + "ore%7Cprivacy%7Cstatistics%7Cactivities%7Cauction%7Ce" + "mail%7Cfree%7Cin...&t=112373&xt=5%7C61%7C0&tz=-1&ev=x" + "&tk=&za=1&ortb-za=1&zu=&zl=&ax=U&ay=U&ortb-pid=536454" + ".55&ortb-sid=112373.8&seats=999&ortb-xt=IAB24&ortb-ugc=">>, + <<"i=9pQNskA&v=0ySQQd1F&ev=12345678&t=12345&sz=3&ip=67.58." 
+ "236.89&la=en&pg=http%3A%2F%2Fwww.yahoo.com%2Fpage1.ht" + "m&re=http%3A%2F%2Fsearch.google.com&fc=1&fr=1&br=2&bv" + "=3.0.14&os=1&ov=XP&k=cars%2Cford&rs=js&xt=5%7C22%7C23" + "4&tz=%2B180&tk=key1%3Dvalue1%7Ckey2%3Dvalue2&zl=4%2C5" + "%2C6&za=4&zu=competitor.com&ua=Mozilla%2F5.0+%28Windo" + "ws%3B+U%3B+Windows+NT+6.1%3B+en-US%29+AppleWebKit%2F5" + "34.13+%28KHTML%2C+like+Gecko%29+Chrome%2F9.0.597.98+S" + "afari%2F534.13&ortb-za=1%2C6%2C13&ortb-pid=521732&ort" + "b-sid=521732&ortb-xt=IAB3&ortb-ugc=">> + ], + [{V, fun() -> V = qs(parse_qs(V)) end} || V <- Tests]. +-endif. + +-ifdef(PERF). +horse_parse_qs_shorter() -> + horse:repeat(20000, + parse_qs(<<"hl=en&q=erlang%20cowboy">>) + ). + +horse_parse_qs_short() -> + horse:repeat(20000, + parse_qs( + <<"direction=desc&for=extend%2Franch&sort=updated&state=open">>) + ). + +horse_parse_qs_long() -> + horse:repeat(20000, + parse_qs(<<"i=EWiIXmPj5gl6&v=QowBp0oDLQXdd4x_GwiywA&ip=98.20.31.81&" + "la=en&pg=New8.undertonebrandsafe.com%2F698a2525065ee260c0b2f2a" + "aad89ab82&re=&sz=1&fc=1&fr=140&br=3&bv=11.0.696.16&os=3&ov=&rs" + "=vpl&k=cookies%7Csale%7Cbrowser%7Cmore%7Cprivacy%7Cstatistics%" + "7Cactivities%7Cauction%7Cemail%7Cfree%7Cin...&t=112373&xt=5%7C" + "61%7C0&tz=-1&ev=x&tk=&za=1&ortb-za=1&zu=&zl=&ax=U&ay=U&ortb-pi" + "d=536454.55&ortb-sid=112373.8&seats=999&ortb-xt=IAB24&ortb-ugc" + "=">>) + ). + +horse_parse_qs_longer() -> + horse:repeat(20000, + parse_qs(<<"i=9pQNskA&v=0ySQQd1F&ev=12345678&t=12345&sz=3&ip=67.58." 
+ "236.89&la=en&pg=http%3A%2F%2Fwww.yahoo.com%2Fpage1.htm&re=http" + "%3A%2F%2Fsearch.google.com&fc=1&fr=1&br=2&bv=3.0.14&os=1&ov=XP" + "&k=cars%2cford&rs=js&xt=5%7c22%7c234&tz=%2b180&tk=key1%3Dvalue" + "1%7Ckey2%3Dvalue2&zl=4,5,6&za=4&zu=competitor.com&ua=Mozilla%2" + "F5.0%20(Windows%3B%20U%3B%20Windows%20NT%206.1%3B%20en-US)%20A" + "ppleWebKit%2F534.13%20(KHTML%2C%20like%20Gecko)%20Chrome%2F9.0" + ".597.98%20Safari%2F534.13&ortb-za=1%2C6%2C13&ortb-pid=521732&o" + "rtb-sid=521732&ortb-xt=IAB3&ortb-ugc=">>) + ). +-endif. + +%% @doc Build an application/x-www-form-urlencoded string. + +-spec qs(qs_vals()) -> binary(). +qs([]) -> + <<>>; +qs(L) -> + qs(L, <<>>). + +qs([], Acc) -> + << $&, Qs/bits >> = Acc, + Qs; +qs([{Name, true}|Tail], Acc) -> + Acc2 = urlencode(Name, << Acc/bits, $& >>), + qs(Tail, Acc2); +qs([{Name, Value}|Tail], Acc) -> + Acc2 = urlencode(Name, << Acc/bits, $& >>), + Acc3 = urlencode(Value, << Acc2/bits, $= >>), + qs(Tail, Acc3). + +-define(QS_SHORTER, [ + {<<"hl">>, <<"en">>}, + {<<"q">>, <<"erlang cowboy">>} +]). + +-define(QS_SHORT, [ + {<<"direction">>, <<"desc">>}, + {<<"for">>, <<"extend/ranch">>}, + {<<"sort">>, <<"updated">>}, + {<<"state">>, <<"open">>} +]). 
+ +-define(QS_LONG, [ + {<<"i">>, <<"EWiIXmPj5gl6">>}, + {<<"v">>, <<"QowBp0oDLQXdd4x_GwiywA">>}, + {<<"ip">>, <<"98.20.31.81">>}, + {<<"la">>, <<"en">>}, + {<<"pg">>, <<"New8.undertonebrandsafe.com/" + "698a2525065ee260c0b2f2aaad89ab82">>}, + {<<"re">>, <<>>}, + {<<"sz">>, <<"1">>}, + {<<"fc">>, <<"1">>}, + {<<"fr">>, <<"140">>}, + {<<"br">>, <<"3">>}, + {<<"bv">>, <<"11.0.696.16">>}, + {<<"os">>, <<"3">>}, + {<<"ov">>, <<>>}, + {<<"rs">>, <<"vpl">>}, + {<<"k">>, <<"cookies|sale|browser|more|privacy|statistics|" + "activities|auction|email|free|in...">>}, + {<<"t">>, <<"112373">>}, + {<<"xt">>, <<"5|61|0">>}, + {<<"tz">>, <<"-1">>}, + {<<"ev">>, <<"x">>}, + {<<"tk">>, <<>>}, + {<<"za">>, <<"1">>}, + {<<"ortb-za">>, <<"1">>}, + {<<"zu">>, <<>>}, + {<<"zl">>, <<>>}, + {<<"ax">>, <<"U">>}, + {<<"ay">>, <<"U">>}, + {<<"ortb-pid">>, <<"536454.55">>}, + {<<"ortb-sid">>, <<"112373.8">>}, + {<<"seats">>, <<"999">>}, + {<<"ortb-xt">>, <<"IAB24">>}, + {<<"ortb-ugc">>, <<>>} +]). + +-define(QS_LONGER, [ + {<<"i">>, <<"9pQNskA">>}, + {<<"v">>, <<"0ySQQd1F">>}, + {<<"ev">>, <<"12345678">>}, + {<<"t">>, <<"12345">>}, + {<<"sz">>, <<"3">>}, + {<<"ip">>, <<"67.58.236.89">>}, + {<<"la">>, <<"en">>}, + {<<"pg">>, <<"http://www.yahoo.com/page1.htm">>}, + {<<"re">>, <<"http://search.google.com">>}, + {<<"fc">>, <<"1">>}, + {<<"fr">>, <<"1">>}, + {<<"br">>, <<"2">>}, + {<<"bv">>, <<"3.0.14">>}, + {<<"os">>, <<"1">>}, + {<<"ov">>, <<"XP">>}, + {<<"k">>, <<"cars,ford">>}, + {<<"rs">>, <<"js">>}, + {<<"xt">>, <<"5|22|234">>}, + {<<"tz">>, <<"+180">>}, + {<<"tk">>, <<"key1=value1|key2=value2">>}, + {<<"zl">>, <<"4,5,6">>}, + {<<"za">>, <<"4">>}, + {<<"zu">>, <<"competitor.com">>}, + {<<"ua">>, <<"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) " + "AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.98 " + "Safari/534.13">>}, + {<<"ortb-za">>, <<"1,6,13">>}, + {<<"ortb-pid">>, <<"521732">>}, + {<<"ortb-sid">>, <<"521732">>}, + {<<"ortb-xt">>, <<"IAB3">>}, + {<<"ortb-ugc">>, <<>>} +]). 
+ +-ifdef(TEST). +qs_test_() -> + Tests = [ + {[<<"a">>], error}, + {[{<<"a">>, <<"b">>, <<"c">>}], error}, + {[], <<>>}, + {[{<<"a">>, true}], <<"a">>}, + {[{<<"a">>, true}, {<<"b">>, true}], <<"a&b">>}, + {[{<<"a">>, <<>>}], <<"a=">>}, + {[{<<"a">>, <<"b">>}], <<"a=b">>}, + {[{<<"a">>, <<>>}, {<<"b">>, <<>>}], <<"a=&b=">>}, + {[{<<"a">>, <<"b">>}, {<<"c">>, true}, {<<"d">>, <<"e">>}], + <<"a=b&c&d=e">>}, + {[{<<"a">>, <<"b=c">>}, {<<"d">>, <<"e=f">>}, {<<"g">>, <<"h=i">>}], + <<"a=b%3Dc&d=e%3Df&g=h%3Di">>}, + {[{<<" ">>, true}], <<"+">>}, + {[{<<" ">>, <<" ">>}], <<"+=+">>}, + {[{<<"a b">>, <<"c d">>}], <<"a+b=c+d">>}, + {[{<<" a ">>, <<" b ">>}, {<<" c ">>, <<" d ">>}], + <<"+a+=+b+&+c+=+d+">>}, + {[{<<"%&=">>, <<"%&=">>}, {<<"_-.">>, <<".-_">>}], + <<"%25%26%3D=%25%26%3D&_-.=.-_">>}, + {[{<<"for">>, <<"extend/ranch">>}], <<"for=extend%2Franch">>} + ], + [{lists:flatten(io_lib:format("~p", [Vals])), fun() -> + E = try qs(Vals) of + R -> R + catch _:_ -> + error + end + end} || {Vals, E} <- Tests]. + +qs_identity_test_() -> + Tests = [ + [{<<"+">>, true}], + ?QS_SHORTER, + ?QS_SHORT, + ?QS_LONG, + ?QS_LONGER + ], + [{lists:flatten(io_lib:format("~p", [V])), fun() -> + V = parse_qs(qs(V)) + end} || V <- Tests]. +-endif. + +-ifdef(PERF). +horse_qs_shorter() -> + horse:repeat(20000, qs(?QS_SHORTER)). + +horse_qs_short() -> + horse:repeat(20000, qs(?QS_SHORT)). + +horse_qs_long() -> + horse:repeat(20000, qs(?QS_LONG)). + +horse_qs_longer() -> + horse:repeat(20000, qs(?QS_LONGER)). +-endif. + +%% @doc Decode a percent encoded string (x-www-form-urlencoded rules). + +-spec urldecode(B) -> B when B::binary(). +urldecode(B) -> + urldecode(B, <<>>). + +urldecode(<< $%, H, L, Rest/bits >>, Acc) -> + C = (unhex(H) bsl 4 bor unhex(L)), + urldecode(Rest, << Acc/bits, C >>); +urldecode(<< $+, Rest/bits >>, Acc) -> + urldecode(Rest, << Acc/bits, " " >>); +urldecode(<< C, Rest/bits >>, Acc) when C =/= $% -> + urldecode(Rest, << Acc/bits, C >>); +urldecode(<<>>, Acc) -> + Acc. 
+ +unhex($0) -> 0; +unhex($1) -> 1; +unhex($2) -> 2; +unhex($3) -> 3; +unhex($4) -> 4; +unhex($5) -> 5; +unhex($6) -> 6; +unhex($7) -> 7; +unhex($8) -> 8; +unhex($9) -> 9; +unhex($A) -> 10; +unhex($B) -> 11; +unhex($C) -> 12; +unhex($D) -> 13; +unhex($E) -> 14; +unhex($F) -> 15; +unhex($a) -> 10; +unhex($b) -> 11; +unhex($c) -> 12; +unhex($d) -> 13; +unhex($e) -> 14; +unhex($f) -> 15. + +-ifdef(TEST). +urldecode_test_() -> + Tests = [ + {<<"%20">>, <<" ">>}, + {<<"+">>, <<" ">>}, + {<<"%00">>, <<0>>}, + {<<"%fF">>, <<255>>}, + {<<"123">>, <<"123">>}, + {<<"%i5">>, error}, + {<<"%5">>, error} + ], + [{Qs, fun() -> + E = try urldecode(Qs) of + R -> R + catch _:_ -> + error + end + end} || {Qs, E} <- Tests]. + +urldecode_identity_test_() -> + Tests = [ + <<"+">>, + <<"nothingnothingnothingnothing">>, + <<"Small+fast+modular+HTTP+server">>, + <<"Small%2C+fast%2C+modular+HTTP+server.">>, + <<"%E3%83%84%E3%82%A4%E3%83%B3%E3%82%BD%E3%82%A6%E3%83" + "%AB%E3%80%9C%E8%BC%AA%E5%BB%BB%E3%81%99%E3%82%8B%E6%97%8B%E5" + "%BE%8B%E3%80%9C">> + ], + [{V, fun() -> V = urlencode(urldecode(V)) end} || V <- Tests]. +-endif. + +-ifdef(PERF). +horse_urldecode() -> + horse:repeat(100000, + urldecode(<<"nothingnothingnothingnothing">>) + ). + +horse_urldecode_plus() -> + horse:repeat(100000, + urldecode(<<"Small+fast+modular+HTTP+server">>) + ). + +horse_urldecode_hex() -> + horse:repeat(100000, + urldecode(<<"Small%2C%20fast%2C%20modular%20HTTP%20server.">>) + ). + +horse_urldecode_jp_hex() -> + horse:repeat(100000, + urldecode(<<"%E3%83%84%E3%82%A4%E3%83%B3%E3%82%BD%E3%82%A6%E3%83" + "%AB%E3%80%9C%E8%BC%AA%E5%BB%BB%E3%81%99%E3%82%8B%E6%97%8B%E5" + "%BE%8B%E3%80%9C">>) + ). + +horse_urldecode_mix() -> + horse:repeat(100000, + urldecode(<<"Small%2C+fast%2C+modular+HTTP+server.">>) + ). +-endif. + +%% @doc Percent encode a string (x-www-form-urlencoded rules). + +-spec urlencode(B) -> B when B::binary(). +urlencode(B) -> + urlencode(B, <<>>). 
+ +urlencode(<< $\s, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $+ >>); +urlencode(<< $-, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $- >>); +urlencode(<< $., Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $. >>); +urlencode(<< $0, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $0 >>); +urlencode(<< $1, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $1 >>); +urlencode(<< $2, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $2 >>); +urlencode(<< $3, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $3 >>); +urlencode(<< $4, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $4 >>); +urlencode(<< $5, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $5 >>); +urlencode(<< $6, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $6 >>); +urlencode(<< $7, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $7 >>); +urlencode(<< $8, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $8 >>); +urlencode(<< $9, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $9 >>); +urlencode(<< $A, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $A >>); +urlencode(<< $B, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $B >>); +urlencode(<< $C, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $C >>); +urlencode(<< $D, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $D >>); +urlencode(<< $E, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $E >>); +urlencode(<< $F, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $F >>); +urlencode(<< $G, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $G >>); +urlencode(<< $H, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $H >>); +urlencode(<< $I, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $I >>); +urlencode(<< $J, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $J >>); +urlencode(<< $K, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $K >>); +urlencode(<< $L, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $L >>); +urlencode(<< $M, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $M 
>>); +urlencode(<< $N, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $N >>); +urlencode(<< $O, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $O >>); +urlencode(<< $P, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $P >>); +urlencode(<< $Q, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Q >>); +urlencode(<< $R, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $R >>); +urlencode(<< $S, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $S >>); +urlencode(<< $T, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $T >>); +urlencode(<< $U, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $U >>); +urlencode(<< $V, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $V >>); +urlencode(<< $W, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $W >>); +urlencode(<< $X, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $X >>); +urlencode(<< $Y, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Y >>); +urlencode(<< $Z, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $Z >>); +urlencode(<< $_, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $_ >>); +urlencode(<< $a, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $a >>); +urlencode(<< $b, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $b >>); +urlencode(<< $c, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $c >>); +urlencode(<< $d, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $d >>); +urlencode(<< $e, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $e >>); +urlencode(<< $f, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $f >>); +urlencode(<< $g, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $g >>); +urlencode(<< $h, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $h >>); +urlencode(<< $i, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $i >>); +urlencode(<< $j, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $j >>); +urlencode(<< $k, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $k >>); +urlencode(<< $l, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, 
$l >>); +urlencode(<< $m, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $m >>); +urlencode(<< $n, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $n >>); +urlencode(<< $o, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $o >>); +urlencode(<< $p, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $p >>); +urlencode(<< $q, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $q >>); +urlencode(<< $r, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $r >>); +urlencode(<< $s, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $s >>); +urlencode(<< $t, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $t >>); +urlencode(<< $u, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $u >>); +urlencode(<< $v, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $v >>); +urlencode(<< $w, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $w >>); +urlencode(<< $x, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $x >>); +urlencode(<< $y, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $y >>); +urlencode(<< $z, Rest/bits >>, Acc) -> urlencode(Rest, << Acc/bits, $z >>); +urlencode(<< C, Rest/bits >>, Acc) -> + H = hex(C bsr 4), + L = hex(C band 16#0f), + urlencode(Rest, << Acc/bits, $%, H, L >>); +urlencode(<<>>, Acc) -> + Acc. + +hex( 0) -> $0; +hex( 1) -> $1; +hex( 2) -> $2; +hex( 3) -> $3; +hex( 4) -> $4; +hex( 5) -> $5; +hex( 6) -> $6; +hex( 7) -> $7; +hex( 8) -> $8; +hex( 9) -> $9; +hex(10) -> $A; +hex(11) -> $B; +hex(12) -> $C; +hex(13) -> $D; +hex(14) -> $E; +hex(15) -> $F. + +-ifdef(TEST). +urlencode_test_() -> + Tests = [ + {<<255, 0>>, <<"%FF%00">>}, + {<<255, " ">>, <<"%FF+">>}, + {<<" ">>, <<"+">>}, + {<<"aBc123">>, <<"aBc123">>}, + {<<".-_">>, <<".-_">>} + ], + [{V, fun() -> E = urlencode(V) end} || {V, E} <- Tests]. 
+ +urlencode_identity_test_() -> + Tests = [ + <<"+">>, + <<"nothingnothingnothingnothing">>, + <<"Small fast modular HTTP server">>, + <<"Small, fast, modular HTTP server.">>, + <<227,131,132,227,130,164,227,131,179,227,130,189,227, + 130,166,227,131,171,227,128,156,232,188,170,229,187,187,227, + 129,153,227,130,139,230,151,139,229,190,139,227,128,156>> + ], + [{V, fun() -> V = urldecode(urlencode(V)) end} || V <- Tests]. +-endif. + +-ifdef(PERF). +horse_urlencode() -> + horse:repeat(100000, + urlencode(<<"nothingnothingnothingnothing">>) + ). + +horse_urlencode_plus() -> + horse:repeat(100000, + urlencode(<<"Small fast modular HTTP server">>) + ). + +horse_urlencode_jp() -> + horse:repeat(100000, + urlencode(<<227,131,132,227,130,164,227,131,179,227,130,189,227, + 130,166,227,131,171,227,128,156,232,188,170,229,187,187,227, + 129,153,227,130,139,230,151,139,229,190,139,227,128,156>>) + ). + +horse_urlencode_mix() -> + horse:repeat(100000, + urlencode(<<"Small, fast, modular HTTP server.">>) + ). +-endif. diff --git a/rabbitmq-server/deps/cowlib/src/cow_spdy.erl b/rabbitmq-server/deps/cowlib/src/cow_spdy.erl new file mode 100644 index 0000000..59c1ba4 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/src/cow_spdy.erl @@ -0,0 +1,265 @@ +%% Copyright (c) 2013-2014, Loïc Hoguin +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-module(cow_spdy). + +%% Zstream. +-export([deflate_init/0]). +-export([inflate_init/0]). + +%% Parse. +-export([split/1]). +-export([parse/2]). + +%% Build. +-export([data/3]). +-export([syn_stream/12]). +-export([syn_reply/6]). +-export([rst_stream/2]). +%% @todo settings +-export([ping/1]). +-export([goaway/2]). +%% @todo headers +%% @todo window_update + +-include("cow_spdy.hrl"). + +%% Zstream. + +deflate_init() -> + Zdef = zlib:open(), + ok = zlib:deflateInit(Zdef), + _ = zlib:deflateSetDictionary(Zdef, ?ZDICT), + Zdef. + +inflate_init() -> + Zinf = zlib:open(), + ok = zlib:inflateInit(Zinf), + Zinf. + +%% Parse. + +split(Data = << _:40, Length:24, _/bits >>) + when byte_size(Data) >= Length + 8 -> + Length2 = Length + 8, + << Frame:Length2/binary, Rest/bits >> = Data, + {true, Frame, Rest}; +split(_) -> + false. 
+ +parse(<< 0:1, StreamID:31, 0:7, IsFinFlag:1, _:24, Data/bits >>, _) -> + {data, StreamID, from_flag(IsFinFlag), Data}; +parse(<< 1:1, 3:15, 1:16, 0:6, IsUnidirectionalFlag:1, IsFinFlag:1, + _:25, StreamID:31, _:1, AssocToStreamID:31, Priority:3, _:5, + 0:8, Rest/bits >>, Zinf) -> + case parse_headers(Rest, Zinf) of + {ok, Headers, [{<<":host">>, Host}, {<<":method">>, Method}, + {<<":path">>, Path}, {<<":scheme">>, Scheme}, + {<<":version">>, Version}]} -> + {syn_stream, StreamID, AssocToStreamID, from_flag(IsFinFlag), + from_flag(IsUnidirectionalFlag), Priority, Method, + Scheme, Host, Path, Version, Headers}; + _ -> + {error, badprotocol} + end; +parse(<< 1:1, 3:15, 2:16, 0:7, IsFinFlag:1, _:25, + StreamID:31, Rest/bits >>, Zinf) -> + case parse_headers(Rest, Zinf) of + {ok, Headers, [{<<":status">>, Status}, {<<":version">>, Version}]} -> + {syn_reply, StreamID, from_flag(IsFinFlag), + Status, Version, Headers}; + _ -> + {error, badprotocol} + end; +parse(<< 1:1, 3:15, 3:16, 0:8, _:56, StatusCode:32 >>, _) + when StatusCode =:= 0; StatusCode > 11 -> + {error, badprotocol}; +parse(<< 1:1, 3:15, 3:16, 0:8, _:25, StreamID:31, StatusCode:32 >>, _) -> + Status = case StatusCode of + 1 -> protocol_error; + 2 -> invalid_stream; + 3 -> refused_stream; + 4 -> unsupported_version; + 5 -> cancel; + 6 -> internal_error; + 7 -> flow_control_error; + 8 -> stream_in_use; + 9 -> stream_already_closed; + 10 -> invalid_credentials; + 11 -> frame_too_large + end, + {rst_stream, StreamID, Status}; +parse(<< 1:1, 3:15, 4:16, 0:7, ClearSettingsFlag:1, _:24, + NbEntries:32, Rest/bits >>, _) -> + try + Settings = [begin + Is0 = 0, + Key = case ID of + 1 -> upload_bandwidth; + 2 -> download_bandwidth; + 3 -> round_trip_time; + 4 -> max_concurrent_streams; + 5 -> current_cwnd; + 6 -> download_retrans_rate; + 7 -> initial_window_size; + 8 -> client_certificate_vector_size + end, + {Key, Value, from_flag(PersistFlag), from_flag(WasPersistedFlag)} + end || << Is0:6, WasPersistedFlag:1, 
PersistFlag:1, + ID:24, Value:32 >> <= Rest], + NbEntries = length(Settings), + {settings, from_flag(ClearSettingsFlag), Settings} + catch _:_ -> + {error, badprotocol} + end; +parse(<< 1:1, 3:15, 6:16, 0:8, _:24, PingID:32 >>, _) -> + {ping, PingID}; +parse(<< 1:1, 3:15, 7:16, 0:8, _:56, StatusCode:32 >>, _) + when StatusCode > 2 -> + {error, badprotocol}; +parse(<< 1:1, 3:15, 7:16, 0:8, _:25, LastGoodStreamID:31, + StatusCode:32 >>, _) -> + Status = case StatusCode of + 0 -> ok; + 1 -> protocol_error; + 2 -> internal_error + end, + {goaway, LastGoodStreamID, Status}; +parse(<< 1:1, 3:15, 8:16, 0:7, IsFinFlag:1, _:25, StreamID:31, + Rest/bits >>, Zinf) -> + case parse_headers(Rest, Zinf) of + {ok, Headers, []} -> + {headers, StreamID, from_flag(IsFinFlag), Headers}; + _ -> + {error, badprotocol} + end; +parse(<< 1:1, 3:15, 9:16, 0:8, _:57, 0:31 >>, _) -> + {error, badprotocol}; +parse(<< 1:1, 3:15, 9:16, 0:8, _:25, StreamID:31, + _:1, DeltaWindowSize:31 >>, _) -> + {window_update, StreamID, DeltaWindowSize}; +parse(_, _) -> + {error, badprotocol}. + +parse_headers(Data, Zinf) -> + [<< NbHeaders:32, Rest/bits >>] = inflate(Zinf, Data), + parse_headers(Rest, NbHeaders, [], []). + +parse_headers(<<>>, 0, Headers, SpHeaders) -> + {ok, lists:reverse(Headers), lists:sort(SpHeaders)}; +parse_headers(<<>>, _, _, _) -> + error; +parse_headers(_, 0, _, _) -> + error; +parse_headers(<< 0:32, _/bits >>, _, _, _) -> + error; +parse_headers(<< L1:32, Key:L1/binary, L2:32, Value:L2/binary, Rest/bits >>, + NbHeaders, Acc, SpAcc) -> + case Key of + << $:, _/bits >> -> + parse_headers(Rest, NbHeaders - 1, Acc, + lists:keystore(Key, 1, SpAcc, {Key, Value})); + _ -> + parse_headers(Rest, NbHeaders - 1, [{Key, Value}|Acc], SpAcc) + end. + +inflate(Zinf, Data) -> + try + zlib:inflate(Zinf, Data) + catch _:_ -> + ok = zlib:inflateSetDictionary(Zinf, ?ZDICT), + zlib:inflate(Zinf, <<>>) + end. + +from_flag(0) -> false; +from_flag(1) -> true. + +%% Build. 
+ +data(StreamID, IsFin, Data) -> + IsFinFlag = to_flag(IsFin), + Length = iolist_size(Data), + [<< 0:1, StreamID:31, 0:7, IsFinFlag:1, Length:24 >>, Data]. + +syn_stream(Zdef, StreamID, AssocToStreamID, IsFin, IsUnidirectional, + Priority, Method, Scheme, Host, Path, Version, Headers) -> + IsFinFlag = to_flag(IsFin), + IsUnidirectionalFlag = to_flag(IsUnidirectional), + HeaderBlock = build_headers(Zdef, [ + {<<":method">>, Method}, + {<<":scheme">>, Scheme}, + {<<":host">>, Host}, + {<<":path">>, Path}, + {<<":version">>, Version} + |Headers]), + Length = 10 + iolist_size(HeaderBlock), + [<< 1:1, 3:15, 1:16, 0:6, IsUnidirectionalFlag:1, IsFinFlag:1, + Length:24, 0:1, StreamID:31, 0:1, AssocToStreamID:31, + Priority:3, 0:5, 0:8 >>, HeaderBlock]. + +syn_reply(Zdef, StreamID, IsFin, Status, Version, Headers) -> + IsFinFlag = to_flag(IsFin), + HeaderBlock = build_headers(Zdef, [ + {<<":status">>, Status}, + {<<":version">>, Version} + |Headers]), + Length = 4 + iolist_size(HeaderBlock), + [<< 1:1, 3:15, 2:16, 0:7, IsFinFlag:1, Length:24, + 0:1, StreamID:31 >>, HeaderBlock]. + +rst_stream(StreamID, Status) -> + StatusCode = case Status of + protocol_error -> 1; + invalid_stream -> 2; + refused_stream -> 3; + unsupported_version -> 4; + cancel -> 5; + internal_error -> 6; + flow_control_error -> 7; + stream_in_use -> 8; + stream_already_closed -> 9; + invalid_credentials -> 10; + frame_too_large -> 11 + end, + << 1:1, 3:15, 3:16, 0:8, 8:24, + 0:1, StreamID:31, StatusCode:32 >>. + +%% @todo settings + +ping(PingID) -> + << 1:1, 3:15, 6:16, 0:8, 4:24, PingID:32 >>. + +goaway(LastGoodStreamID, Status) -> + StatusCode = case Status of + ok -> 0; + protocol_error -> 1; + internal_error -> 2 + end, + << 1:1, 3:15, 7:16, 0:8, 8:24, + 0:1, LastGoodStreamID:31, StatusCode:32 >>. 
+ +%% @todo headers +%% @todo window_update + +build_headers(Zdef, Headers) -> + NbHeaders = length(Headers), + Headers2 = [begin + L1 = iolist_size(Key), + L2 = iolist_size(Value), + [<< L1:32 >>, Key, << L2:32 >>, Value] + end || {Key, Value} <- Headers], + zlib:deflate(Zdef, [<< NbHeaders:32 >>, Headers2], full). + +to_flag(false) -> 0; +to_flag(true) -> 1. diff --git a/rabbitmq-server/deps/cowlib/src/cow_spdy.hrl b/rabbitmq-server/deps/cowlib/src/cow_spdy.hrl new file mode 100644 index 0000000..9637b1c --- /dev/null +++ b/rabbitmq-server/deps/cowlib/src/cow_spdy.hrl @@ -0,0 +1,181 @@ +%% Zlib dictionary. + +-define(ZDICT, << + 16#00, 16#00, 16#00, 16#07, 16#6f, 16#70, 16#74, 16#69, + 16#6f, 16#6e, 16#73, 16#00, 16#00, 16#00, 16#04, 16#68, + 16#65, 16#61, 16#64, 16#00, 16#00, 16#00, 16#04, 16#70, + 16#6f, 16#73, 16#74, 16#00, 16#00, 16#00, 16#03, 16#70, + 16#75, 16#74, 16#00, 16#00, 16#00, 16#06, 16#64, 16#65, + 16#6c, 16#65, 16#74, 16#65, 16#00, 16#00, 16#00, 16#05, + 16#74, 16#72, 16#61, 16#63, 16#65, 16#00, 16#00, 16#00, + 16#06, 16#61, 16#63, 16#63, 16#65, 16#70, 16#74, 16#00, + 16#00, 16#00, 16#0e, 16#61, 16#63, 16#63, 16#65, 16#70, + 16#74, 16#2d, 16#63, 16#68, 16#61, 16#72, 16#73, 16#65, + 16#74, 16#00, 16#00, 16#00, 16#0f, 16#61, 16#63, 16#63, + 16#65, 16#70, 16#74, 16#2d, 16#65, 16#6e, 16#63, 16#6f, + 16#64, 16#69, 16#6e, 16#67, 16#00, 16#00, 16#00, 16#0f, + 16#61, 16#63, 16#63, 16#65, 16#70, 16#74, 16#2d, 16#6c, + 16#61, 16#6e, 16#67, 16#75, 16#61, 16#67, 16#65, 16#00, + 16#00, 16#00, 16#0d, 16#61, 16#63, 16#63, 16#65, 16#70, + 16#74, 16#2d, 16#72, 16#61, 16#6e, 16#67, 16#65, 16#73, + 16#00, 16#00, 16#00, 16#03, 16#61, 16#67, 16#65, 16#00, + 16#00, 16#00, 16#05, 16#61, 16#6c, 16#6c, 16#6f, 16#77, + 16#00, 16#00, 16#00, 16#0d, 16#61, 16#75, 16#74, 16#68, + 16#6f, 16#72, 16#69, 16#7a, 16#61, 16#74, 16#69, 16#6f, + 16#6e, 16#00, 16#00, 16#00, 16#0d, 16#63, 16#61, 16#63, + 16#68, 16#65, 16#2d, 16#63, 16#6f, 16#6e, 16#74, 16#72, + 16#6f, 16#6c, 16#00, 
16#00, 16#00, 16#0a, 16#63, 16#6f, + 16#6e, 16#6e, 16#65, 16#63, 16#74, 16#69, 16#6f, 16#6e, + 16#00, 16#00, 16#00, 16#0c, 16#63, 16#6f, 16#6e, 16#74, + 16#65, 16#6e, 16#74, 16#2d, 16#62, 16#61, 16#73, 16#65, + 16#00, 16#00, 16#00, 16#10, 16#63, 16#6f, 16#6e, 16#74, + 16#65, 16#6e, 16#74, 16#2d, 16#65, 16#6e, 16#63, 16#6f, + 16#64, 16#69, 16#6e, 16#67, 16#00, 16#00, 16#00, 16#10, + 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, 16#74, 16#2d, + 16#6c, 16#61, 16#6e, 16#67, 16#75, 16#61, 16#67, 16#65, + 16#00, 16#00, 16#00, 16#0e, 16#63, 16#6f, 16#6e, 16#74, + 16#65, 16#6e, 16#74, 16#2d, 16#6c, 16#65, 16#6e, 16#67, + 16#74, 16#68, 16#00, 16#00, 16#00, 16#10, 16#63, 16#6f, + 16#6e, 16#74, 16#65, 16#6e, 16#74, 16#2d, 16#6c, 16#6f, + 16#63, 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#00, 16#00, + 16#00, 16#0b, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, + 16#74, 16#2d, 16#6d, 16#64, 16#35, 16#00, 16#00, 16#00, + 16#0d, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, 16#74, + 16#2d, 16#72, 16#61, 16#6e, 16#67, 16#65, 16#00, 16#00, + 16#00, 16#0c, 16#63, 16#6f, 16#6e, 16#74, 16#65, 16#6e, + 16#74, 16#2d, 16#74, 16#79, 16#70, 16#65, 16#00, 16#00, + 16#00, 16#04, 16#64, 16#61, 16#74, 16#65, 16#00, 16#00, + 16#00, 16#04, 16#65, 16#74, 16#61, 16#67, 16#00, 16#00, + 16#00, 16#06, 16#65, 16#78, 16#70, 16#65, 16#63, 16#74, + 16#00, 16#00, 16#00, 16#07, 16#65, 16#78, 16#70, 16#69, + 16#72, 16#65, 16#73, 16#00, 16#00, 16#00, 16#04, 16#66, + 16#72, 16#6f, 16#6d, 16#00, 16#00, 16#00, 16#04, 16#68, + 16#6f, 16#73, 16#74, 16#00, 16#00, 16#00, 16#08, 16#69, + 16#66, 16#2d, 16#6d, 16#61, 16#74, 16#63, 16#68, 16#00, + 16#00, 16#00, 16#11, 16#69, 16#66, 16#2d, 16#6d, 16#6f, + 16#64, 16#69, 16#66, 16#69, 16#65, 16#64, 16#2d, 16#73, + 16#69, 16#6e, 16#63, 16#65, 16#00, 16#00, 16#00, 16#0d, + 16#69, 16#66, 16#2d, 16#6e, 16#6f, 16#6e, 16#65, 16#2d, + 16#6d, 16#61, 16#74, 16#63, 16#68, 16#00, 16#00, 16#00, + 16#08, 16#69, 16#66, 16#2d, 16#72, 16#61, 16#6e, 16#67, + 16#65, 16#00, 16#00, 16#00, 16#13, 16#69, 16#66, 
16#2d, + 16#75, 16#6e, 16#6d, 16#6f, 16#64, 16#69, 16#66, 16#69, + 16#65, 16#64, 16#2d, 16#73, 16#69, 16#6e, 16#63, 16#65, + 16#00, 16#00, 16#00, 16#0d, 16#6c, 16#61, 16#73, 16#74, + 16#2d, 16#6d, 16#6f, 16#64, 16#69, 16#66, 16#69, 16#65, + 16#64, 16#00, 16#00, 16#00, 16#08, 16#6c, 16#6f, 16#63, + 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#00, 16#00, 16#00, + 16#0c, 16#6d, 16#61, 16#78, 16#2d, 16#66, 16#6f, 16#72, + 16#77, 16#61, 16#72, 16#64, 16#73, 16#00, 16#00, 16#00, + 16#06, 16#70, 16#72, 16#61, 16#67, 16#6d, 16#61, 16#00, + 16#00, 16#00, 16#12, 16#70, 16#72, 16#6f, 16#78, 16#79, + 16#2d, 16#61, 16#75, 16#74, 16#68, 16#65, 16#6e, 16#74, + 16#69, 16#63, 16#61, 16#74, 16#65, 16#00, 16#00, 16#00, + 16#13, 16#70, 16#72, 16#6f, 16#78, 16#79, 16#2d, 16#61, + 16#75, 16#74, 16#68, 16#6f, 16#72, 16#69, 16#7a, 16#61, + 16#74, 16#69, 16#6f, 16#6e, 16#00, 16#00, 16#00, 16#05, + 16#72, 16#61, 16#6e, 16#67, 16#65, 16#00, 16#00, 16#00, + 16#07, 16#72, 16#65, 16#66, 16#65, 16#72, 16#65, 16#72, + 16#00, 16#00, 16#00, 16#0b, 16#72, 16#65, 16#74, 16#72, + 16#79, 16#2d, 16#61, 16#66, 16#74, 16#65, 16#72, 16#00, + 16#00, 16#00, 16#06, 16#73, 16#65, 16#72, 16#76, 16#65, + 16#72, 16#00, 16#00, 16#00, 16#02, 16#74, 16#65, 16#00, + 16#00, 16#00, 16#07, 16#74, 16#72, 16#61, 16#69, 16#6c, + 16#65, 16#72, 16#00, 16#00, 16#00, 16#11, 16#74, 16#72, + 16#61, 16#6e, 16#73, 16#66, 16#65, 16#72, 16#2d, 16#65, + 16#6e, 16#63, 16#6f, 16#64, 16#69, 16#6e, 16#67, 16#00, + 16#00, 16#00, 16#07, 16#75, 16#70, 16#67, 16#72, 16#61, + 16#64, 16#65, 16#00, 16#00, 16#00, 16#0a, 16#75, 16#73, + 16#65, 16#72, 16#2d, 16#61, 16#67, 16#65, 16#6e, 16#74, + 16#00, 16#00, 16#00, 16#04, 16#76, 16#61, 16#72, 16#79, + 16#00, 16#00, 16#00, 16#03, 16#76, 16#69, 16#61, 16#00, + 16#00, 16#00, 16#07, 16#77, 16#61, 16#72, 16#6e, 16#69, + 16#6e, 16#67, 16#00, 16#00, 16#00, 16#10, 16#77, 16#77, + 16#77, 16#2d, 16#61, 16#75, 16#74, 16#68, 16#65, 16#6e, + 16#74, 16#69, 16#63, 16#61, 16#74, 16#65, 16#00, 16#00, + 16#00, 16#06, 
16#6d, 16#65, 16#74, 16#68, 16#6f, 16#64, + 16#00, 16#00, 16#00, 16#03, 16#67, 16#65, 16#74, 16#00, + 16#00, 16#00, 16#06, 16#73, 16#74, 16#61, 16#74, 16#75, + 16#73, 16#00, 16#00, 16#00, 16#06, 16#32, 16#30, 16#30, + 16#20, 16#4f, 16#4b, 16#00, 16#00, 16#00, 16#07, 16#76, + 16#65, 16#72, 16#73, 16#69, 16#6f, 16#6e, 16#00, 16#00, + 16#00, 16#08, 16#48, 16#54, 16#54, 16#50, 16#2f, 16#31, + 16#2e, 16#31, 16#00, 16#00, 16#00, 16#03, 16#75, 16#72, + 16#6c, 16#00, 16#00, 16#00, 16#06, 16#70, 16#75, 16#62, + 16#6c, 16#69, 16#63, 16#00, 16#00, 16#00, 16#0a, 16#73, + 16#65, 16#74, 16#2d, 16#63, 16#6f, 16#6f, 16#6b, 16#69, + 16#65, 16#00, 16#00, 16#00, 16#0a, 16#6b, 16#65, 16#65, + 16#70, 16#2d, 16#61, 16#6c, 16#69, 16#76, 16#65, 16#00, + 16#00, 16#00, 16#06, 16#6f, 16#72, 16#69, 16#67, 16#69, + 16#6e, 16#31, 16#30, 16#30, 16#31, 16#30, 16#31, 16#32, + 16#30, 16#31, 16#32, 16#30, 16#32, 16#32, 16#30, 16#35, + 16#32, 16#30, 16#36, 16#33, 16#30, 16#30, 16#33, 16#30, + 16#32, 16#33, 16#30, 16#33, 16#33, 16#30, 16#34, 16#33, + 16#30, 16#35, 16#33, 16#30, 16#36, 16#33, 16#30, 16#37, + 16#34, 16#30, 16#32, 16#34, 16#30, 16#35, 16#34, 16#30, + 16#36, 16#34, 16#30, 16#37, 16#34, 16#30, 16#38, 16#34, + 16#30, 16#39, 16#34, 16#31, 16#30, 16#34, 16#31, 16#31, + 16#34, 16#31, 16#32, 16#34, 16#31, 16#33, 16#34, 16#31, + 16#34, 16#34, 16#31, 16#35, 16#34, 16#31, 16#36, 16#34, + 16#31, 16#37, 16#35, 16#30, 16#32, 16#35, 16#30, 16#34, + 16#35, 16#30, 16#35, 16#32, 16#30, 16#33, 16#20, 16#4e, + 16#6f, 16#6e, 16#2d, 16#41, 16#75, 16#74, 16#68, 16#6f, + 16#72, 16#69, 16#74, 16#61, 16#74, 16#69, 16#76, 16#65, + 16#20, 16#49, 16#6e, 16#66, 16#6f, 16#72, 16#6d, 16#61, + 16#74, 16#69, 16#6f, 16#6e, 16#32, 16#30, 16#34, 16#20, + 16#4e, 16#6f, 16#20, 16#43, 16#6f, 16#6e, 16#74, 16#65, + 16#6e, 16#74, 16#33, 16#30, 16#31, 16#20, 16#4d, 16#6f, + 16#76, 16#65, 16#64, 16#20, 16#50, 16#65, 16#72, 16#6d, + 16#61, 16#6e, 16#65, 16#6e, 16#74, 16#6c, 16#79, 16#34, + 16#30, 16#30, 16#20, 16#42, 16#61, 16#64, 
16#20, 16#52, + 16#65, 16#71, 16#75, 16#65, 16#73, 16#74, 16#34, 16#30, + 16#31, 16#20, 16#55, 16#6e, 16#61, 16#75, 16#74, 16#68, + 16#6f, 16#72, 16#69, 16#7a, 16#65, 16#64, 16#34, 16#30, + 16#33, 16#20, 16#46, 16#6f, 16#72, 16#62, 16#69, 16#64, + 16#64, 16#65, 16#6e, 16#34, 16#30, 16#34, 16#20, 16#4e, + 16#6f, 16#74, 16#20, 16#46, 16#6f, 16#75, 16#6e, 16#64, + 16#35, 16#30, 16#30, 16#20, 16#49, 16#6e, 16#74, 16#65, + 16#72, 16#6e, 16#61, 16#6c, 16#20, 16#53, 16#65, 16#72, + 16#76, 16#65, 16#72, 16#20, 16#45, 16#72, 16#72, 16#6f, + 16#72, 16#35, 16#30, 16#31, 16#20, 16#4e, 16#6f, 16#74, + 16#20, 16#49, 16#6d, 16#70, 16#6c, 16#65, 16#6d, 16#65, + 16#6e, 16#74, 16#65, 16#64, 16#35, 16#30, 16#33, 16#20, + 16#53, 16#65, 16#72, 16#76, 16#69, 16#63, 16#65, 16#20, + 16#55, 16#6e, 16#61, 16#76, 16#61, 16#69, 16#6c, 16#61, + 16#62, 16#6c, 16#65, 16#4a, 16#61, 16#6e, 16#20, 16#46, + 16#65, 16#62, 16#20, 16#4d, 16#61, 16#72, 16#20, 16#41, + 16#70, 16#72, 16#20, 16#4d, 16#61, 16#79, 16#20, 16#4a, + 16#75, 16#6e, 16#20, 16#4a, 16#75, 16#6c, 16#20, 16#41, + 16#75, 16#67, 16#20, 16#53, 16#65, 16#70, 16#74, 16#20, + 16#4f, 16#63, 16#74, 16#20, 16#4e, 16#6f, 16#76, 16#20, + 16#44, 16#65, 16#63, 16#20, 16#30, 16#30, 16#3a, 16#30, + 16#30, 16#3a, 16#30, 16#30, 16#20, 16#4d, 16#6f, 16#6e, + 16#2c, 16#20, 16#54, 16#75, 16#65, 16#2c, 16#20, 16#57, + 16#65, 16#64, 16#2c, 16#20, 16#54, 16#68, 16#75, 16#2c, + 16#20, 16#46, 16#72, 16#69, 16#2c, 16#20, 16#53, 16#61, + 16#74, 16#2c, 16#20, 16#53, 16#75, 16#6e, 16#2c, 16#20, + 16#47, 16#4d, 16#54, 16#63, 16#68, 16#75, 16#6e, 16#6b, + 16#65, 16#64, 16#2c, 16#74, 16#65, 16#78, 16#74, 16#2f, + 16#68, 16#74, 16#6d, 16#6c, 16#2c, 16#69, 16#6d, 16#61, + 16#67, 16#65, 16#2f, 16#70, 16#6e, 16#67, 16#2c, 16#69, + 16#6d, 16#61, 16#67, 16#65, 16#2f, 16#6a, 16#70, 16#67, + 16#2c, 16#69, 16#6d, 16#61, 16#67, 16#65, 16#2f, 16#67, + 16#69, 16#66, 16#2c, 16#61, 16#70, 16#70, 16#6c, 16#69, + 16#63, 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#2f, 16#78, + 16#6d, 
16#6c, 16#2c, 16#61, 16#70, 16#70, 16#6c, 16#69, + 16#63, 16#61, 16#74, 16#69, 16#6f, 16#6e, 16#2f, 16#78, + 16#68, 16#74, 16#6d, 16#6c, 16#2b, 16#78, 16#6d, 16#6c, + 16#2c, 16#74, 16#65, 16#78, 16#74, 16#2f, 16#70, 16#6c, + 16#61, 16#69, 16#6e, 16#2c, 16#74, 16#65, 16#78, 16#74, + 16#2f, 16#6a, 16#61, 16#76, 16#61, 16#73, 16#63, 16#72, + 16#69, 16#70, 16#74, 16#2c, 16#70, 16#75, 16#62, 16#6c, + 16#69, 16#63, 16#70, 16#72, 16#69, 16#76, 16#61, 16#74, + 16#65, 16#6d, 16#61, 16#78, 16#2d, 16#61, 16#67, 16#65, + 16#3d, 16#67, 16#7a, 16#69, 16#70, 16#2c, 16#64, 16#65, + 16#66, 16#6c, 16#61, 16#74, 16#65, 16#2c, 16#73, 16#64, + 16#63, 16#68, 16#63, 16#68, 16#61, 16#72, 16#73, 16#65, + 16#74, 16#3d, 16#75, 16#74, 16#66, 16#2d, 16#38, 16#63, + 16#68, 16#61, 16#72, 16#73, 16#65, 16#74, 16#3d, 16#69, + 16#73, 16#6f, 16#2d, 16#38, 16#38, 16#35, 16#39, 16#2d, + 16#31, 16#2c, 16#75, 16#74, 16#66, 16#2d, 16#2c, 16#2a, + 16#2c, 16#65, 16#6e, 16#71, 16#3d, 16#30, 16#2e >>). diff --git a/rabbitmq-server/deps/cowlib/src/cowlib.app.src b/rabbitmq-server/deps/cowlib/src/cowlib.app.src new file mode 100644 index 0000000..7545fc7 --- /dev/null +++ b/rabbitmq-server/deps/cowlib/src/cowlib.app.src @@ -0,0 +1,7 @@ +{application,cowlib, + [{description,"Support library for manipulating Web protocols."}, + {vsn,"1.0.1"}, + {id,"git"}, + {modules,[]}, + {registered,[]}, + {applications,[kernel,stdlib,crypto]}]}. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/proper_SUITE.erl b/rabbitmq-server/deps/cowlib/test/eunit_SUITE.erl similarity index 60% rename from rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/proper_SUITE.erl rename to rabbitmq-server/deps/cowlib/test/eunit_SUITE.erl index 440aa5f..dddfdd3 100644 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/proper_SUITE.erl +++ b/rabbitmq-server/deps/cowlib/test/eunit_SUITE.erl @@ -1,4 +1,4 @@ -%% Copyright (c) 2011, Loïc Hoguin +%% Copyright (c) 2013-2014, Loïc Hoguin %% %% Permission to use, copy, modify, and/or distribute this software for any %% purpose with or without fee is hereby granted, provided that the above @@ -12,26 +12,20 @@ %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. --module(proper_SUITE). +-module(eunit_SUITE). -include_lib("common_test/include/ct.hrl"). --export([all/0, groups/0]). %% ct. --export([dispatcher_split_host/1]). %% cowboy_dispatcher. - %% ct. +-export([all/0]). -all() -> - [{group, dispatcher}]. +%% Tests. +-export([eunit/1]). -groups() -> - [{dispatcher, [], [dispatcher_split_host]}]. +%% ct. -%% cowboy_dispatcher. +all() -> + [eunit]. -dispatcher_split_host(_Config) -> - true = proper:quickcheck(dispatcher_prop:prop_split_host_symmetric(), - [{on_output, fun(Format, Data) -> - io:format(user, Format, Data), %% Console. - io:format(Format, Data) %% Logs. - end}]). +eunit(_) -> + ok = eunit:test({application, cowlib}). 
diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-APACHE2-ExplorerCanvas b/rabbitmq-server/deps/licensing/LICENSE-APACHE2-ExplorerCanvas similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-APACHE2-ExplorerCanvas rename to rabbitmq-server/deps/licensing/LICENSE-APACHE2-ExplorerCanvas diff --git a/rabbitmq-server/LICENSE-Apache-Basho b/rabbitmq-server/deps/licensing/LICENSE-APL2-Rebar similarity index 100% rename from rabbitmq-server/LICENSE-Apache-Basho rename to rabbitmq-server/deps/licensing/LICENSE-APL2-Rebar diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-APL2-Stomp-Websocket b/rabbitmq-server/deps/licensing/LICENSE-APL2-Stomp-Websocket similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-APL2-Stomp-Websocket rename to rabbitmq-server/deps/licensing/LICENSE-APL2-Stomp-Websocket diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-BSD-base64js b/rabbitmq-server/deps/licensing/LICENSE-BSD-base64js similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-BSD-base64js rename to rabbitmq-server/deps/licensing/LICENSE-BSD-base64js diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-BSD-glMatrix b/rabbitmq-server/deps/licensing/LICENSE-BSD-glMatrix similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-BSD-glMatrix rename to rabbitmq-server/deps/licensing/LICENSE-BSD-glMatrix diff --git a/rabbitmq-server/deps/licensing/LICENSE-EPL-OTP b/rabbitmq-server/deps/licensing/LICENSE-EPL-OTP new file mode 100644 index 0000000..2257751 --- /dev/null +++ b/rabbitmq-server/deps/licensing/LICENSE-EPL-OTP @@ -0,0 +1,286 @@ +ERLANG PUBLIC LICENSE +Version 1.1 + +1. Definitions. + +1.1. ``Contributor'' means each entity that creates or contributes to +the creation of Modifications. + +1.2. 
``Contributor Version'' means the combination of the Original +Code, prior Modifications used by a Contributor, and the Modifications +made by that particular Contributor. + +1.3. ``Covered Code'' means the Original Code or Modifications or the +combination of the Original Code and Modifications, in each case +including portions thereof. + +1.4. ``Electronic Distribution Mechanism'' means a mechanism generally +accepted in the software development community for the electronic +transfer of data. + +1.5. ``Executable'' means Covered Code in any form other than Source +Code. + +1.6. ``Initial Developer'' means the individual or entity identified +as the Initial Developer in the Source Code notice required by Exhibit +A. + +1.7. ``Larger Work'' means a work which combines Covered Code or +portions thereof with code not governed by the terms of this License. + +1.8. ``License'' means this document. + +1.9. ``Modifications'' means any addition to or deletion from the +substance or structure of either the Original Code or any previous +Modifications. When Covered Code is released as a series of files, a +Modification is: + +A. Any addition to or deletion from the contents of a file containing + Original Code or previous Modifications. + +B. Any new file that contains any part of the Original Code or + previous Modifications. + +1.10. ``Original Code'' means Source Code of computer software code +which is described in the Source Code notice required by Exhibit A as +Original Code, and which, at the time of its release under this +License is not already Covered Code governed by this License. + +1.11. 
``Source Code'' means the preferred form of the Covered Code for +making modifications to it, including all modules it contains, plus +any associated interface definition files, scripts used to control +compilation and installation of an Executable, or a list of source +code differential comparisons against either the Original Code or +another well known, available Covered Code of the Contributor's +choice. The Source Code can be in a compressed or archival form, +provided the appropriate decompression or de-archiving software is +widely available for no charge. + +1.12. ``You'' means an individual or a legal entity exercising rights +under, and complying with all of the terms of, this License. For legal +entities,``You'' includes any entity which controls, is controlled by, +or is under common control with You. For purposes of this definition, +``control'' means (a) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (b) ownership of fifty percent (50%) or more of the +outstanding shares or beneficial ownership of such entity. + +2. Source Code License. + +2.1. The Initial Developer Grant. +The Initial Developer hereby grants You a world-wide, royalty-free, +non-exclusive license, subject to third party intellectual property +claims: + +(a) to use, reproduce, modify, display, perform, sublicense and + distribute the Original Code (or portions thereof) with or without + Modifications, or as part of a Larger Work; and + +(b) under patents now or hereafter owned or controlled by Initial + Developer, to make, have made, use and sell (``Utilize'') the + Original Code (or portions thereof), but solely to the extent that + any such patent is reasonably necessary to enable You to Utilize + the Original Code (or portions thereof) and not to any greater + extent that may be necessary to Utilize further Modifications or + combinations. + +2.2. Contributor Grant. 
+Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license, subject to third party intellectual property +claims: + +(a) to use, reproduce, modify, display, perform, sublicense and + distribute the Modifications created by such Contributor (or + portions thereof) either on an unmodified basis, with other + Modifications, as Covered Code or as part of a Larger Work; and + +(b) under patents now or hereafter owned or controlled by Contributor, + to Utilize the Contributor Version (or portions thereof), but + solely to the extent that any such patent is reasonably necessary + to enable You to Utilize the Contributor Version (or portions + thereof), and not to any greater extent that may be necessary to + Utilize further Modifications or combinations. + +3. Distribution Obligations. + +3.1. Application of License. +The Modifications which You contribute are governed by the terms of +this License, including without limitation Section 2.2. The Source +Code version of Covered Code may be distributed only under the terms +of this License, and You must include a copy of this License with +every copy of the Source Code You distribute. You may not offer or +impose any terms on any Source Code version that alters or restricts +the applicable version of this License or the recipients' rights +hereunder. However, You may include an additional document offering +the additional rights described in Section 3.5. + +3.2. Availability of Source Code. 
+Any Modification which You contribute must be made available in Source +Code form under the terms of this License either on the same media as +an Executable version or via an accepted Electronic Distribution +Mechanism to anyone to whom you made an Executable version available; +and if made available via Electronic Distribution Mechanism, must +remain available for at least twelve (12) months after the date it +initially became available, or at least six (6) months after a +subsequent version of that particular Modification has been made +available to such recipients. You are responsible for ensuring that +the Source Code version remains available even if the Electronic +Distribution Mechanism is maintained by a third party. + +3.3. Description of Modifications. +You must cause all Covered Code to which you contribute to contain a +file documenting the changes You made to create that Covered Code and +the date of any change. You must include a prominent statement that +the Modification is derived, directly or indirectly, from Original +Code provided by the Initial Developer and including the name of the +Initial Developer in (a) the Source Code, and (b) in any notice in an +Executable version or related documentation in which You describe the +origin or ownership of the Covered Code. + +3.4. Intellectual Property Matters + +(a) Third Party Claims. + If You have knowledge that a party claims an intellectual property + right in particular functionality or code (or its utilization + under this License), you must include a text file with the source + code distribution titled ``LEGAL'' which describes the claim and + the party making the claim in sufficient detail that a recipient + will know whom to contact. 
If you obtain such knowledge after You + make Your Modification available as described in Section 3.2, You + shall promptly modify the LEGAL file in all copies You make + available thereafter and shall take other steps (such as notifying + appropriate mailing lists or newsgroups) reasonably calculated to + inform those who received the Covered Code that new knowledge has + been obtained. + +(b) Contributor APIs. + If Your Modification is an application programming interface and + You own or control patents which are reasonably necessary to + implement that API, you must also include this information in the + LEGAL file. + +3.5. Required Notices. +You must duplicate the notice in Exhibit A in each file of the Source +Code, and this License in any documentation for the Source Code, where +You describe recipients' rights relating to Covered Code. If You +created one or more Modification(s), You may add your name as a +Contributor to the notice described in Exhibit A. If it is not +possible to put such notice in a particular Source Code file due to +its structure, then you must include such notice in a location (such +as a relevant directory file) where a user would be likely to look for +such a notice. You may choose to offer, and to charge a fee for, +warranty, support, indemnity or liability obligations to one or more +recipients of Covered Code. However, You may do so only on Your own +behalf, and not on behalf of the Initial Developer or any +Contributor. You must make it absolutely clear than any such warranty, +support, indemnity or liability obligation is offered by You alone, +and You hereby agree to indemnify the Initial Developer and every +Contributor for any liability incurred by the Initial Developer or +such Contributor as a result of warranty, support, indemnity or +liability terms You offer. + +3.6. Distribution of Executable Versions. 
+You may distribute Covered Code in Executable form only if the +requirements of Section 3.1-3.5 have been met for that Covered Code, +and if You include a notice stating that the Source Code version of +the Covered Code is available under the terms of this License, +including a description of how and where You have fulfilled the +obligations of Section 3.2. The notice must be conspicuously included +in any notice in an Executable version, related documentation or +collateral in which You describe recipients' rights relating to the +Covered Code. You may distribute the Executable version of Covered +Code under a license of Your choice, which may contain terms different +from this License, provided that You are in compliance with the terms +of this License and that the license for the Executable version does +not attempt to limit or alter the recipient's rights in the Source +Code version from the rights set forth in this License. If You +distribute the Executable version under a different license You must +make it absolutely clear that any terms which differ from this License +are offered by You alone, not by the Initial Developer or any +Contributor. You hereby agree to indemnify the Initial Developer and +every Contributor for any liability incurred by the Initial Developer +or such Contributor as a result of any such terms You offer. + +3.7. Larger Works. +You may create a Larger Work by combining Covered Code with other code +not governed by the terms of this License and distribute the Larger +Work as a single product. In such a case, You must make sure the +requirements of this License are fulfilled for the Covered Code. + +4. Inability to Comply Due to Statute or Regulation. 
+If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Code due to statute +or regulation then You must: (a) comply with the terms of this License +to the maximum extent possible; and (b) describe the limitations and +the code they affect. Such description must be included in the LEGAL +file described in Section 3.4 and must be included with all +distributions of the Source Code. Except to the extent prohibited by +statute or regulation, such description must be sufficiently detailed +for a recipient of ordinary skill to be able to understand it. + +5. Application of this License. + +This License applies to code to which the Initial Developer has +attached the notice in Exhibit A, and to related Covered Code. + +6. CONNECTION TO MOZILLA PUBLIC LICENSE + +This Erlang License is a derivative work of the Mozilla Public +License, Version 1.0. It contains terms which differ from the Mozilla +Public License, Version 1.0. + +7. DISCLAIMER OF WARRANTY. + +COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN ``AS IS'' BASIS, +WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, +WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF +DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR +NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF +THE COVERED CODE IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE +IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER +CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR +CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART +OF THIS LICENSE. NO USE OF ANY COVERED CODE IS AUTHORIZED HEREUNDER +EXCEPT UNDER THIS DISCLAIMER. + +8. TERMINATION. +This License and the rights granted hereunder will terminate +automatically if You fail to comply with terms herein and fail to cure +such breach within 30 days of becoming aware of the breach. 
All +sublicenses to the Covered Code which are properly granted shall +survive any termination of this License. Provisions which, by their +nature, must remain in effect beyond the termination of this License +shall survive. + +9. DISCLAIMER OF LIABILITY +Any utilization of Covered Code shall not cause the Initial Developer +or any Contributor to be liable for any damages (neither direct nor +indirect). + +10. MISCELLANEOUS +This License represents the complete agreement concerning the subject +matter hereof. If any provision is held to be unenforceable, such +provision shall be reformed only to the extent necessary to make it +enforceable. This License shall be construed by and in accordance with +the substantive laws of Sweden. Any dispute, controversy or claim +arising out of or relating to this License, or the breach, termination +or invalidity thereof, shall be subject to the exclusive jurisdiction +of Swedish courts, with the Stockholm City Court as the first +instance. + +EXHIBIT A. + +``The contents of this file are subject to the Erlang Public License, +Version 1.1, (the "License"); you may not use this file except in +compliance with the License. You should have received a copy of the +Erlang Public License along with this software. If not, it can be +retrieved via the world wide web at http://www.erlang.org/. + +Software distributed under the License is distributed on an "AS IS" +basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +the License for the specific language governing rights and limitations +under the License. + +The Initial Developer of the Original Code is Ericsson AB. +Portions created by Ericsson are Copyright 2013, Ericsson AB. 
+All Rights Reserved.'' diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-MIT-EJS10 b/rabbitmq-server/deps/licensing/LICENSE-MIT-EJS10 similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-EJS10 rename to rabbitmq-server/deps/licensing/LICENSE-MIT-EJS10 diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Flot b/rabbitmq-server/deps/licensing/LICENSE-MIT-Flot similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Flot rename to rabbitmq-server/deps/licensing/LICENSE-MIT-Flot diff --git a/rabbitmq-server/deps/licensing/LICENSE-MIT-Mochiweb b/rabbitmq-server/deps/licensing/LICENSE-MIT-Mochiweb new file mode 100644 index 0000000..7b7c506 --- /dev/null +++ b/rabbitmq-server/deps/licensing/LICENSE-MIT-Mochiweb @@ -0,0 +1,22 @@ +This is the MIT license. + +Copyright (c) 2007 Mochi Media, Inc. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Sammy060 b/rabbitmq-server/deps/licensing/LICENSE-MIT-Sammy060 similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Sammy060 rename to rabbitmq-server/deps/licensing/LICENSE-MIT-Sammy060 diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/LICENSE-MIT-eldap b/rabbitmq-server/deps/licensing/LICENSE-MIT-SockJS similarity index 96% rename from rabbitmq-server/plugins-src/eldap-wrapper/LICENSE-MIT-eldap rename to rabbitmq-server/deps/licensing/LICENSE-MIT-SockJS index 1f62009..a897167 100644 --- a/rabbitmq-server/plugins-src/eldap-wrapper/LICENSE-MIT-eldap +++ b/rabbitmq-server/deps/licensing/LICENSE-MIT-SockJS @@ -1,6 +1,5 @@ +Copyright (C) 2011 VMware, Inc. -Copyright (c) 2010, Torbjorn Tornkvist - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights @@ -18,4 +17,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-MIT-jQuery164 b/rabbitmq-server/deps/licensing/LICENSE-MIT-jQuery164 similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-MIT-jQuery164 rename to rabbitmq-server/deps/licensing/LICENSE-MIT-jQuery164 diff --git a/rabbitmq-server/codegen/LICENSE-MPL-RabbitMQ b/rabbitmq-server/deps/licensing/LICENSE-MPL-RabbitMQ similarity index 100% rename from rabbitmq-server/codegen/LICENSE-MPL-RabbitMQ rename to rabbitmq-server/deps/licensing/LICENSE-MPL-RabbitMQ diff --git a/rabbitmq-server/deps/licensing/LICENSE-MPL2 b/rabbitmq-server/deps/licensing/LICENSE-MPL2 new file mode 100644 index 0000000..14e2f77 --- /dev/null +++ b/rabbitmq-server/deps/licensing/LICENSE-MPL2 @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. 
"Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. 
Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/rabbitmq-server/deps/licensing/license_info_rabbitmq_codegen b/rabbitmq-server/deps/licensing/license_info_rabbitmq_codegen new file mode 100644 index 0000000..a703cbd --- /dev/null +++ b/rabbitmq-server/deps/licensing/license_info_rabbitmq_codegen @@ -0,0 +1,4 @@ +The files amqp-rabbitmq-0.8.json and amqp-rabbitmq-0.9.1.json are +"Copyright (C) 2008-2016 Pivotal Software, Inc", Inc. and are covered by the MIT +license. + diff --git a/rabbitmq-server/plugins-src/licensing/license_info_rabbitmq-management b/rabbitmq-server/deps/licensing/license_info_rabbitmq_management similarity index 100% rename from rabbitmq-server/plugins-src/licensing/license_info_rabbitmq-management rename to rabbitmq-server/deps/licensing/license_info_rabbitmq_management diff --git a/rabbitmq-server/plugins-src/licensing/license_info_rabbitmq-management-visualiser b/rabbitmq-server/deps/licensing/license_info_rabbitmq_management_visualiser similarity index 100% rename from rabbitmq-server/plugins-src/licensing/license_info_rabbitmq-management-visualiser rename to rabbitmq-server/deps/licensing/license_info_rabbitmq_management_visualiser diff --git a/rabbitmq-server/deps/mochiweb/CHANGES.md b/rabbitmq-server/deps/mochiweb/CHANGES.md new file mode 100644 index 0000000..af80a19 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/CHANGES.md @@ -0,0 +1,201 @@ +Version 2.13.0 released 2016-02-08 + +* Support parsing of UTF-16 surrogate pairs encoded as character + references in mochiweb_html + https://github.com/mochi/mochiweb/issues/164 +* Avoid swallowing messages that are not related to the socket + during request parsing + https://github.com/mochi/mochiweb/pull/161 +* Ensure 
correct ordering of Set-Cookie headers: first in, first out + https://github.com/mochi/mochiweb/issues/162 +* Improve response times by caching a formatted date once per second + for the response headers with a mochiweb_clock service + https://github.com/mochi/mochiweb/pull/158 + +Version 2.12.2 released 2015-02-21 + +* Close connections quietly when setopts fails with a closed socket. + https://github.com/mochi/mochiweb/pull/152 + +Version 2.12.1 released 2015-02-01 + +* Fix active_socket accounting + https://github.com/mochi/mochiweb/issues/149 +* Added full MIT license preludes to each source file to make it + easier for mochiweb's code to be used piecemeal + https://github.com/mochi/mochiweb/pull/148 + +Version 2.12.0 released 2015-01-16 + +* Send "Connection: close" header when the server is going to close + a Keep-Alive connection, usually due to unread data from the + client + https://github.com/mochi/mochiweb/issues/146 + +Version 2.11.2 released 2015-01-16 + +* Fix regression introduced in #147 + https://github.com/mochi/mochiweb/pull/147 + +Version 2.11.1 released 2015-01-16 + +* Accept range end position which exceededs the resource size + https://github.com/mochi/mochiweb/pull/147 + +Version 2.11.0 released 2015-01-12 + +* Perform SSL handshake after releasing acceptor back into the pool, + and slow accept rate when file descriptors are not available, + to mitigate a potential DoS attack. Adds new mochiweb_socket + functions transport_accept/1 and finish_accept/1 which should be + used in preference to the now deprecated accept/1 function. + https://github.com/mochi/mochiweb/issues/138 + +Version 2.10.1 released 2015-01-11 + +* Fixes issue with SSL and mochiweb_websocket. Note that + mochiweb_websocket is still experimental and the API + is subject to change in future versions. + https://github.com/mochi/mochiweb/pull/144 + +Version 2.10.0 released 2014-12-17 + +* Added new `recbuf` option to mochiweb_http to allow the receive + buffer to be configured. 
+ https://github.com/mochi/mochiweb/pull/134 + +Version 2.9.2 released 2014-10-16 + +* Add timeouts to SSL connect to prevent DoS by opening a connection + and not doing anything. + https://github.com/mochi/mochiweb/pull/140 +* Prevent using ECDH cipher in R16B because it is broken + https://github.com/mochi/mochiweb/pull/140 +* For default SSL connections, remove usage of sslv3 and not-so-secure + ciphers. + https://github.com/mochi/mochiweb/pull/140 + +Version 2.9.1 released 2014-09-29 + +* Fix Makefile rule for building docs + https://github.com/mochi/mochiweb/issues/135 +* Minimize gen_tcp:send calls to optimize performance. + https://github.com/mochi/mochiweb/pull/137 + +Version 2.9.0 released 2014-06-24 + +* Increased timeout in test suite for FreeBSD + https://github.com/mochi/mochiweb/pull/121 +* Updated rebar to v2.5.0 and fixed associated build issues + https://github.com/mochi/mochiweb/issues/131 + +Version 2.8.0 released 2014-01-01 + +* Websocket support + https://github.com/mochi/mochiweb/pull/120 +* Force files named "crossdomain.xml" to have MIME type + text/x-cross-domain-policy. + https://github.com/mochi/mochiweb/pull/118 + +Version 2.7.0 released 2013-08-01 + +* Fix 0-length range responses + https://github.com/mochi/mochiweb/pull/87 +* Add support for all possible `erlang:decode_packet/3` responses, + previously these would just crash. 
+ https://github.com/mochi/mochiweb/pull/114 +* Makefile fixed to make `make test` work before `make all` + https://github.com/mochi/mochiweb/pull/116 +* Usage of the crypto module made R16B01+ compatible + https://github.com/mochi/mochiweb/pull/115 +* Build fixed for R16B01 + https://github.com/mochi/mochiweb/pull/112 +* `mochiweb_socket_server:stop/1` is now a synchronous + call instead of an asynchronous cast +* `mochiweb_html:parse_tokens/1` (and `parse/1`) will now create a + html element to wrap documents that have a HTML5 doctype + (``) but no html element + https://github.com/mochi/mochiweb/issues/110 + +Version 2.6.0 released 2013-04-15 + +* Enable R15B gen_tcp workaround only on R15B + https://github.com/mochi/mochiweb/pull/107 + +Version 2.5.0 released 2013-03-04 + +* Replace now() with os:timestamp() in acceptor (optimization) + https://github.com/mochi/mochiweb/pull/102 +* New mochiweb_session module for managing session cookies. + NOTE: this module is only supported on R15B02 and later! + https://github.com/mochi/mochiweb/pull/94 +* New mochiweb_base64url module for base64url encoding + (URL and Filename safe alphabet, see RFC 4648). 
+* Fix rebar.config in mochiwebapp_skel to use {branch, "master"} + https://github.com/mochi/mochiweb/issues/105 + +Version 2.4.2 released 2013-02-05 + +* Fixed issue in mochiweb_response introduced in v2.4.0 + https://github.com/mochi/mochiweb/pull/100 + +Version 2.4.1 released 2013-01-30 + +* Fixed issue in mochiweb_request introduced in v2.4.0 + https://github.com/mochi/mochiweb/issues/97 +* Fixed issue in mochifmt_records introduced in v2.4.0 + https://github.com/mochi/mochiweb/issues/96 + +Version 2.4.0 released 2013-01-23 + +* Switch from parameterized modules to explicit tuple module calls for + R16 compatibility (#95) +* Fix for mochiweb_acceptor crash with extra-long HTTP headers under + R15B02 (#91) +* Fix case in handling range headers (#85) +* Handle combined Content-Length header (#88) +* Windows security fix for `safe_relative_path`, any path with a + backslash on any platform is now considered unsafe (#92) + +Version 2.3.2 released 2012-07-27 + +* Case insensitive match for "Connection: close" (#81) + +Version 2.3.1 released 2012-03-31 + +* Fix edoc warnings (#63) +* Fix mochiweb_html handling of invalid charref sequences (unescaped &) (#69). +* Add a manual garbage collection between requests to avoid worst case behavior + on keep-alive sockets. +* Fix dst cookie bug (#73) +* Removed unnecessary template_dir option, see + https://github.com/basho/rebar/issues/203 + +Version 2.3.0 released 2011-10-14 + +* Handle ssl_closed message in mochiweb_http (#59) +* Added support for new MIME types (otf, eot, m4v, svg, svgz, ttc, ttf, + vcf, webm, webp, woff) (#61) +* Updated mochiweb_charref to support all HTML5 entities. Note that + if you are using this module directly, the spec has changed to return + `[integer()]` for some entities. 
(#64) + +Version 2.2.1 released 2011-08-31 + +* Removed `mochiweb_skel` module from the pre-rebar era + +Version 2.2.0 released 2011-08-29 + +* Added new `mochiweb_http:start_link/1` and + `mochiweb_socket_server:start_link/1` APIs to explicitly start linked + servers. Also added `{link, false}` option to the `start/1` variants + to explicitly start unlinked. This is in expectation that we will + eventually change the default behavior of `start/1` to be unlinked as you + would expect it to. See https://github.com/mochi/mochiweb/issues/58 for + discussion. + +Version 2.1.0 released 2011-08-29 + +* Added new `mochijson2:decode/2` with `{format, struct | proplist | eep18}` + options for easy decoding to various proplist formats. Also added encoding + support for eep18 style objects. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/LICENSE b/rabbitmq-server/deps/mochiweb/LICENSE similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/LICENSE rename to rabbitmq-server/deps/mochiweb/LICENSE diff --git a/rabbitmq-server/deps/mochiweb/Makefile b/rabbitmq-server/deps/mochiweb/Makefile new file mode 100644 index 0000000..244d7be --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/Makefile @@ -0,0 +1,22 @@ +IGNORE_DEPS += edown eper eunit_formatters meck node_package rebar_lock_deps_plugin rebar_vsn_plugin reltool_util +C_SRC_DIR = /path/do/not/exist +C_SRC_TYPE = rebar +DRV_CFLAGS = -fPIC +export DRV_CFLAGS +ERLANG_ARCH = 64 +export ERLANG_ARCH +ERLC_OPTS = +debug_info +export ERLC_OPTS + +COMPILE_FIRST += + + +rebar_dep: preprocess pre-deps deps pre-app app + +preprocess:: + +pre-deps:: + +pre-app:: + +include ../../erlang.mk \ No newline at end of file diff --git a/rabbitmq-server/deps/mochiweb/Makefile.orig.mk b/rabbitmq-server/deps/mochiweb/Makefile.orig.mk new file mode 100644 index 0000000..983c304 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/Makefile.orig.mk @@ -0,0 +1,24 @@ +PREFIX:=../ 
+DEST:=$(PREFIX)$(PROJECT) + +REBAR=./rebar + +.PHONY: all edoc test clean build_plt dialyzer app + +all: + @$(REBAR) prepare-deps + +edoc: all + @$(REBAR) doc + +test: + @rm -rf .eunit + @mkdir -p .eunit + @$(REBAR) eunit + +clean: + @$(REBAR) clean + +app: + @[ -z "$(PROJECT)" ] && echo "ERROR: required variable PROJECT missing" 1>&2 && exit 1 || true + @$(REBAR) -r create template=mochiwebapp dest=$(DEST) appid=$(PROJECT) diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/README b/rabbitmq-server/deps/mochiweb/README similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/README rename to rabbitmq-server/deps/mochiweb/README diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/README b/rabbitmq-server/deps/mochiweb/examples/hmac_api/README similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/README rename to rabbitmq-server/deps/mochiweb/examples/hmac_api/README diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api.hrl b/rabbitmq-server/deps/mochiweb/examples/hmac_api/hmac_api.hrl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api.hrl rename to rabbitmq-server/deps/mochiweb/examples/hmac_api/hmac_api.hrl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api_client.erl b/rabbitmq-server/deps/mochiweb/examples/hmac_api/hmac_api_client.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api_client.erl rename to rabbitmq-server/deps/mochiweb/examples/hmac_api/hmac_api_client.erl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api_lib.erl b/rabbitmq-server/deps/mochiweb/examples/hmac_api/hmac_api_lib.erl similarity index 100% rename from 
rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/hmac_api/hmac_api_lib.erl rename to rabbitmq-server/deps/mochiweb/examples/hmac_api/hmac_api_lib.erl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/https_store.erl b/rabbitmq-server/deps/mochiweb/examples/https/https_store.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/https_store.erl rename to rabbitmq-server/deps/mochiweb/examples/https/https_store.erl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/server_cert.pem b/rabbitmq-server/deps/mochiweb/examples/https/server_cert.pem similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/server_cert.pem rename to rabbitmq-server/deps/mochiweb/examples/https/server_cert.pem diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/server_key.pem b/rabbitmq-server/deps/mochiweb/examples/https/server_key.pem similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/https/server_key.pem rename to rabbitmq-server/deps/mochiweb/examples/https/server_key.pem diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/keepalive/keepalive.erl b/rabbitmq-server/deps/mochiweb/examples/keepalive/keepalive.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/examples/keepalive/keepalive.erl rename to rabbitmq-server/deps/mochiweb/examples/keepalive/keepalive.erl diff --git a/rabbitmq-server/deps/mochiweb/examples/websocket/index.html b/rabbitmq-server/deps/mochiweb/examples/websocket/index.html new file mode 100644 index 0000000..6926aba --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/examples/websocket/index.html @@ -0,0 +1,59 @@ + + + + Websockets With Mochiweb Demo + + +

Mochiweb websocket demo

+ +
+ +   State: +
+
Protip: open your javascript error console, just in case..
+
+
+
+ + +
+
+
+
+ + + + + diff --git a/rabbitmq-server/deps/mochiweb/examples/websocket/websocket.erl b/rabbitmq-server/deps/mochiweb/examples/websocket/websocket.erl new file mode 100644 index 0000000..cd52da1 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/examples/websocket/websocket.erl @@ -0,0 +1,148 @@ +-module(websocket). + +%% To run: erlc websocket.erl && erl -pa ../../ebin -s websocket + +%% The MIT License (MIT) + +%% Copyright (c) 2012 Zadane.pl sp. z o.o. + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + +-export([start/0, start_link/0, ws_loop/3, loop/2]). +-export([broadcast_server/1]). + +%% +%% Mochiweb websocket example +%% +%% [1]: At first you have to start HTTP server which will listen for HTTP +%% requests and eventually upgrade connection to websocket +%% [2]: Attempt to upgrade connection to websocket. 
+%% Function mochiweb_websocket:upgrade_connection/2: +%% * first argument is mochiweb_request +%% * second is M:F which will handle further websocket messages. +%% Function return two funs: +%% * ReentryWs/1 - use it to enter to messages handling loop +%% (in this example ws_loop/3) +%% * ReplyChannel/1 - use to send messages to client. May be passed to +%% other processes +%% [3]: Example of sending message to client +%% [4]: State that will be passed to message handling loop +%% [5]: Pass control to messages handling loop. From this moment each message +%% received from client can be handled... +%% [6]: ...here as Payload. State is variable intended for holding your custom +%% state. ReplyChannel is the same function as in [3]. +%% Notice! Payload is list of messages received from client. Websocket +%% framing mechanism concatenates messages which are sent one after another +%% in short time. +%% [7]: Print payload received from client and send it back +%% [8]: Message handling function must return new state value +start() -> + spawn( + fun () -> + application:start(sasl), + start_link(), + receive + stop -> ok + end + end). + +start_link() -> + %% [1] + io:format("Listening at http://127.0.0.1:8080/~n"), + Broadcaster = spawn_link(?MODULE, broadcast_server, [dict:new()]), + mochiweb_http:start_link([ + {name, client_access}, + {loop, {?MODULE, loop, [Broadcaster]}}, + {port, 8080} + ]). + +ws_loop(Payload, Broadcaster, _ReplyChannel) -> + %% [6] + + %% [7] + io:format("Received data: ~p~n", [Payload]), + Received = list_to_binary(Payload), + Broadcaster ! {broadcast, self(), Received}, + + %% [8] + Broadcaster. + +loop(Req, Broadcaster) -> + H = mochiweb_request:get_header_value("Upgrade", Req), + loop(Req, + Broadcaster, + H =/= undefined andalso string:to_lower(H) =:= "websocket"). 
+ +loop(Req, _Broadcaster, false) -> + mochiweb_request:serve_file("index.html", "./", Req); +loop(Req, Broadcaster, true) -> + {ReentryWs, ReplyChannel} = mochiweb_websocket:upgrade_connection( + Req, fun ?MODULE:ws_loop/3), + %% [3] + Broadcaster ! {register, self(), ReplyChannel}, + %% [4] + %% [5] + ReentryWs(Broadcaster). + + +%% This server keeps track of connected pids +broadcast_server(Pids) -> + Pids1 = receive + {register, Pid, Channel} -> + broadcast_register(Pid, Channel, Pids); + {broadcast, Pid, Message} -> + broadcast_sendall(Pid, Message, Pids); + {'DOWN', MRef, process, Pid, _Reason} -> + broadcast_down(Pid, MRef, Pids); + Msg -> + io:format("Unknown message: ~p~n", [Msg]), + Pids + end, + erlang:hibernate(?MODULE, broadcast_server, [Pids1]). + +broadcast_register(Pid, Channel, Pids) -> + MRef = erlang:monitor(process, Pid), + broadcast_sendall( + Pid, "connected", dict:store(Pid, {Channel, MRef}, Pids)). + +broadcast_down(Pid, MRef, Pids) -> + Pids1 = case dict:find(Pid, Pids) of + {ok, {_, MRef}} -> + dict:erase(Pid, Pids); + _ -> + Pids + end, + broadcast_sendall(Pid, "disconnected", Pids1). + +broadcast_sendall(Pid, Msg, Pids) -> + M = iolist_to_binary([pid_to_list(Pid), ": ", Msg]), + dict:fold( + fun (K, {Reply, MRef}, Acc) -> + try + begin + Reply(M), + dict:store(K, {Reply, MRef}, Acc) + end + catch + _:_ -> + Acc + end + end, + dict:new(), + Pids). 
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/include/internal.hrl b/rabbitmq-server/deps/mochiweb/include/internal.hrl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/include/internal.hrl rename to rabbitmq-server/deps/mochiweb/include/internal.hrl diff --git a/rabbitmq-server/deps/mochiweb/rebar b/rabbitmq-server/deps/mochiweb/rebar new file mode 100755 index 0000000000000000000000000000000000000000..0d1980bf285e509ba967420330b4599c7bca2cee GIT binary patch literal 159782 zcmZ^~1B@s^lr=oIZQHizjcwbuZQHhO+cR%$+qP%Ex4S>GoBcYK>Pq^gs*)sHdXs-RD0wMh5EOv_ zc${2U|NQ6R|LMT~z5fgNf5$-of0+N6jQ;-xLj31sqNoKD4L|?@$*=$b|FOjXzv_Q& zfX=|)K1FrY7Ml>k=T$9VJhf!9b%V;sAvuleJD}1op~cW(MbIF?-oaYq{WZ`HnVS`u z4!)P+^zRJJP&#KBbHQ|3^le~PlrO=w++JWjHn_*$TL!e-ZfUs;V7j612Zpf8ML=Kv%ie+EuDqawpah^mueL5V9LVH`MItcF48b=IzhpNKDPcb=fQ z?2kFE-40PYPAgqNFTEd4Uo1CcD8az=#9`R1+-3XvoT^K;K_|NqP$*+5q^@{aX3&7O ztWTBuX`m_st&4mB1Eg|+Xbo4xfHfM!6q+@q-AO_)Koyw%G(uELi&A(z?`vZymM0i1 zIB9%+*I9FN?UiL10jr`h=*DHQ>)(y6B`J@h`4zRd(b2Nzf=g}Cw?LdgPz@>pmQVM6 zhC?X?v`lCCCV*EEr2Wo0M}vei&$F1~vjDKBhGb$|ZE;hxf+;P}yX0q_eVcx(AZK}e z?P5Ma4S3}7LD6;$&O46a(VixaG|@$7bx?E@i75{{k*pT^G!y+W-{$q6d#+2HI-YIT zb7Oi%*-LYbAn=wL#ykOsdVT@1peK?SK2FJj3D0n^X$L!5-3pcYF>)%EPmV8|2%o8b zPQ9wX=cV;)fP;ryRH7o{dm4C9buYm=KeqA>K%b8 zlO?gzsI@9D#2bxvNV&4cSPBVNW{s7wuIS<@q2NUmiR){kzj*@$`FEj zy}W|#3SAAPn~FUKf=#87eh zV&^rbiNG*{OttAK&?^x(-RKhTQytTVgN>&mSQ|}-#YTtdjLdLIJlgvCuUP zuyh!*yZG7wxheGD+I_U-et`e|iu2c&*sr1k0N@Y;0HFSFSKQFVz$RD&QXN@sb%r4T zCSV`{lAv8AkVcdwK3@DrI_EE2P9TXd=kSe zi?ZEV4O~*#jv*t~!;@>!ItfeW^Sj|`J5N&zDq4eRlvTivxe?_`@hod2SJbk$J_D9? 
zNN(MdG-#;MtI$eql|c8LtxJ6^sQ5IhMlA}bSUpjXgk5!WgRVIoF8s3LJwrN->rQ0U z`UzO5x)NuAX!=u0b^V$(LxlX4`8X=JHADTJ!IGBFev6fkspwQ`f2?J%yB$t z6Dzz3@2V)#D`7PkvDOTh&J+&Vifo`v&ycy>UgGoepv67|*Qjm=>((QX%P^ z0YJx#g6{iD^KjU?eFSMbE>dfgF81>|RE*~kB&`IG?iqb5hD*uklnxrT=HaUPb}bx) zMWVahM-W0(^js}rOZ z`|yp>ielxVKC@m-#W7=5P@FGT>$;k_QbSy=vdqkAam9qy2O#+cBUXmp+ zbOzq`p=gm5l*GlDkT+DMK_zS6bNTfu7pgpnWhNJ@TnVaVV$D>aR9fFoTT!kZ*uAdw zeIYg~ZDizRW=9iR%v%~RR=S3{HKIF~PVALhR!CN8wJB|M&TZwIt1ldMPWOpa7#V*k z2)>VsLF57}x=Dp1aljGwNX_gCyMX}y(278o6ceE}e*j1%*#pAq2*SuR3k9QMxy*nO zs>rV)s#N`z5ePLxKdPC~j1;_)yl0^dVI>Rz>f{9tTt)7J8g~bL>6a2@bic#q7-)B^ zM;2oUBTf%}+0~zu;2<@y9ml{j!&}IwRAhik482Qg=64Dz9_A}%7nP3=2J@mwgo?g1 zZHA#MT!;!TmMic#q=*?7DTPd(-$A<7Phc4%_!A0(Mh;`cb*9=EcvO_nI9R3*5ny!Y zr_3Z7$VtkDItDQT;QrS67XA&2u@5dQY)jbSxPUMBfassfuV@Yrbxj!!5l*@pWOeOO zk(LZ}OA^fnzI5uuTGRs21(irt*h)1{H$Fo51M`ON9(3i87?7--kJvemVZ%Bd-nB3_IsOEqTda<08lYkUo>>Rt}g8)-yB&BysFs4FUlOv5q`N@%Uoxo`qaI$bhGV zAl(!eM1Z7VBJx3PZ!ChO{h5bp z{bnt~J+b8I7a&^0aDm!|toD$YcJMmRzyo#K{TM7h{UjjfZ9q3Y23j}KpeBl9!fM4r zVVZU1xmWKXUBQ>)&jn+-aSyvjF(k|YL|y?2Fi9qc8u3KPWCz(h}~Vu@yf)IUlTRr!DCkeU%n zjQuz({zw5fF2H9Ci7eLH%-esIucAM@o}?c8xsh>lk4XHkW&M36`)yInCJM4G2sFmH zgGVXq=Y*Z1Fjz;{IWk7U1C?UQjUElBINJj&(izs6oQFX{QFgMgNe03uHXMlK6QCFr z3TN0@7*&9(>`PvOYT2u27LZhKqk@q4gOG>m6OL-xLW3nhHP&lJ`7GQnMXwfR6@3c4 zNOJ54d=RKDCX}XXWQR+1Z-f_p3T3+Cnol7L4CW9wWE6@?B?kn(RSq{X7vGb|xCKrh z5N-zPz znUhFXE=aI|grq{#>t#TWpZ)Ha1hYB4%XUy)|ZHmtR6)j>V7(8S_(@W=Yt_8Dt0yry-G&1bl}bqNq!TH!oUl57k5X zb#6CIrAQsjs!EO8KYTsUfCaS(x@Unh4@y2ZTnAWSfj1W?0#RdbI6T3dPK5)xx>7}aK2Ruy zqCm%hP>@S3fDw~4#qvxaGf60j=t6uf{A%?Tmne7>sDN4hrPQxE@^7&MQ@adUY0B>_ z6b>-mi4e{%5ODzq#AqM^{2;Ha%!Pv-WlH#=&}j*&oXiCWxr#BITlO5%=_p?;w}O3{ zAnwya-RoDM^Kn0p!Py`%;$=WB7{^sd8izWYlxzp$3@2= znjr{i%?4R!hVJMgbCm=(9EfWhib%E-Fwga0#71r zfiPXH$UgNjG%fv`BXHGF z?s`9;5cj^_a37nB1hZ}c7vqmP{#=jUDCN|4_|vGvQ+oGj=rBt)u(|TO_%X0nMff|R zX^&jI8*33Dig!GN$Rn3U>3<#{F!FVI;tujpPGdkXhy~L4$??ag^!CvoI52f%--g+| z+t|{`>PUpKl zbHzSzJ$z8YymQ64KyBU%+1R`ujimgedB8~6+cKc<=`x{|y|QJ3XNO|_teAnSW6 
z`G=+^r^+4<`>Vv(?`RH>m%Hyx|89R(I49}*&YXx?St)rHYlhi9E_c7S`f7Pj!>_H39-c-)e|cBSEb2{~@@ahx z1RhBY!EJxa4#U9M#@;E-cC{VsL`qoT`}mlhp3x@U(tMRZySl1x*NtthbM^$z$YG~i z+iPuK&TwBabC7ge)H&!Z1$2B&bQWM2UMha%j^ifna!{As&bKzF0xloWmK}udNWV_n z(+n1Zuv1mVeV6GMAIz3&ps!Y*-n=c(m%Ur8Us-j@s_9r(>l|UMJupZoGc^-^n-8 zt$(WNz3{lfLwhX|UyDC}2&5Psn9Vlw%}ogzHcl~V$_0zmj)AL~o)BoI$_Mjhb^7?!oZS(~7v~G7?K`{DyHw!T_pQM4p z#)R!^v|Bl<=s(4)c6^QbER>r{&6nWaR`_8ga{3gvAGogN3Co(3&eYz28=rz1+#upC{$eo>t*c zv3aXg{tz0*D00wz`^s$O=Foemp)!Le^>HI@Dc404vUPJaHWW(_mNZqr=B(P3x(avx#`x3ZmF{Hl}t^nTih#nU`&e*wHSd&0uydAB+~ z^U75_+CF21f6bZM$Wv?_Mkx5Q_^Kvd>EJwE$of4?^zwcWJ)tVRH z{yI?F4l|1FvcW-hP}!JU+=t!`mn) ze?CMGWYUGH`nXwppAQ4RJ>dy=(Zc+dIa|Wn{fqc=YaefS%ClIy1%87uK_yww#mEw8Pu1nkfz{vwP|!zhyS4e0&W^O6|qpxAQI} z?zU?@6rOzbELP!?SZaqJ{ph!Kjlp(qn$R&jk(T$Gi}v$)uTtfk*}TN(vxEF~Z8i@{ zuPTfAk$*F>;D%PAb}p2jIos!nP(Ro+_SXvYr3}&9;%cyQNh?BY*bY|nwc}xD`b5W} z@`GNM#p`&2tsTN_HShckKQH|Tt*_B(Bab>ZG`mLg+2^O?=Em#J)bx7$B;uwwh0Y7Z-kt9Mg#{$}IYr*$UzuS6E1y8U72w8`l4n&4@-nu-E* z99+{C91pYIWiDj{=lmj^To!{18J~+xKRElTnk~*!A?4jgpTk6xx0t50+iTAhQeU5o z-|JSlXdJ87!pWCw#Zaa!@9YiSkKu~Xt!RSP)Tb41srL!9`J;8+CZyYzSdM#kLle=- z!OHC?yj{og7tn5Fv#OV$OYxud^~yPPmZuLQ)1k*#UeV)m{BORC`J0BRUHp#QZ*J_2 z^zE#%!}XdMVk(8h%GbIzK5qo>&2|en1^2B{oC-_Lr2~2>pC24GJ#MQklF`pslO3;8 za6R>QgPr|6dg*dstE;e>U5>4t98W^izgsC{U}U`BXPW5E&PF1_0uv6LK0hJPZ+myB zJG_%T&x`l^Q{f`+?7v&Ct1IMIYZcf-+Sm3(Zb7^m04)#>8zwOkLB?jKak z;oi!i>$DZIm+hw~2eGz^vCNI>s!vYdd`n{D%mZ={O(i3B3JCKpb#G=KE*<_togUWq zn+5otjWPUdjcYv>7SpL(IC6LsmYMb9>rOSd_M9K>yv)K({9CM_@*afKmQGc9*Qfrk z-MbNJs<7Fv^tpJ4jC2nD`I)$1h-(FxcD94!%b-W%@ykMuq2KX!?NC&oywb4LRL^N)8TME$^m$W<8nc-Gg&l z^}JG^OAWtAs=CLSHJsC?pq8;>%&W0g6^j`vga;i6sDY|gmtHeS`ayw_>d^`@xz z-nq}7YTQRsk+vDgWK=FoEB9WT->tvfDm?w>d|Sy&u0vZ@oGfRR?R}j|4|BuN z#8OOcx!0ZKmf~Z8c{N{qs#_(%YCW6yJPuU71JCaA%Z|`F8h(~Xz;OpN2D3(gr$S|t zeeTLmW|g`=Et_t0Xy0}C++{9;-}FLuC!i7=ng@b*qhP%1q*mjMdaZkTSGjy9jSjuj z95M*e^IktI!sOZ3vccU*VYbtN-#_s>-Il+vV@*$wnSWfxti0&0ywIEOjH>)4J@B?U 
zqdf2?p#BYXGXkFRWhvYOGm-*q2OPla~nZ@RU!&D)ANf|v6**;J1b=>>j2jYQomWbw=cySUMY&!H1~!#~qbej)#<;J?2z@p5uO0th2w zBIYvD5$JWgUhAMx7t5M5;+3o$o}|#+4DMqHJ09pB-K>7ZM6M$J>#{l%tGDCm>03i8aoB%xoLt9fLJtI3Cdkbq5N4ozo25eNU ztQS>LdS6_S*je^QDUVqhWT2xLhAlP%*znqDEtUXF1OhCOK*$|5*&}eR%bL9iK(Ooa zu_0hzE?Q;1B)T%P1=inyfg2>ujj)6_1jM~?xOYNJMLrUp`kZd=-DY}b zEVMd#0uiQKhcnYyvNZ7sEzyN2Q09({i^+?NGbKnBHGA`qjJFRltTm-bon@GNLRFWP z9EEP!c{S&yi&3|;Ru@LHWjaiF$t$*E**FlmN785V)rXfG38bQZ<1ON&cMd2{O1Oi0f;Yp{VuQ&dHiCNT5G7N-?tv#Ph4`@7J7yF;F7~Y(OHW;INY1jK1z|t7N6(TXHNtC_K=#k=p(+T4gxy6a zbf+n-O(Fi%t+F=c66;tqv=gxw=N=gX=T07hqNP9=i3$S=mTwLAP4z)V*izRR>3)0n z-jn1582LPONDU;LrkO7V*o6%PxwV2hrvaQ}z#}w(rfsQAnb?Lb(9~|EorMtnk5U|H z#KAX%&9}60%?#)D8BSoqJP;Ivcn3?l@Z)+~d=Z@=aP|$;M;owkNOvsedS>=q5vej` zd5y`VLJ>y5#JzRqTo>klKd5d;P`J?|b3mi04#se|d?Rtq5OGFg+lB$jaf;R;`$O`H zk+%!$fQnE{cT99LU%UrCaDmImB?0h|+&xh3T@N$?}UIi~1@a)|OzBeZx z&q+GuPGYlZemqUTTk{=becDd9UU9wM=f*CZ!d-p23LIa?=JZ*-K|8WdeY-j`wP5hk zB&R!(csi?nrwN?5+MRFxg{&LBhc}Rman4&kK8|&bcFUhJ8Ov$xFEPKz@HSzWLhx7} zeHsXL)|?zKPY`|8EAH-h8>hQiWr4T7+MgO%GH%-|c0=jIKmjh?+2dp~lsNtXUo-0sv9kx;)1?(!uclHnhMs3E zgFc~_^vZCcrd@hC)ms~7di!lpw%?ZJZEY<&)Bf~^W6Y}EWi0eCQTmJHX1}cD(T05V z=fJA|*rjMNH8h4t#f6$MCB_M23Q8S@nzBWWg-}i+3dc{^uJEB8`rWkDo9Oh?&Mov2 zCiCfFj0+Sy%Lp=mPFO@Thx>xoRBEV^u5oRon#!xS7(+tkmCp1|7iwk4CTQYVTAwy5 zKaIBm)-{eSVW#0kXkr;ty0;Ex2dNze9i>zf^&%xWbk;Qy~2VsEeK;%s5< z^q=(MqGn}{t$~_*pR!O1gyuZn8$*!V3saziE)S}@W zC_ID=F{~*vcvuQhnbP^IM>kGAlBhC~LTfQirwT-o5;^s7Y*-OLy9|}6Ri$M^-H~Y2 zqS2D6bLqIHT>_9QB4=$^OtW4EQm!#oaeSO<)0Mtzw=CH)NtGcj?dq^uXjPQeo$b`U z(Ce-(iu_>HVbXQ-ape+;m9NN{=M?2Sa&C znOlaoMy)!gGR3W+*lKwa4zxyzz9a6jEhRGc@-63MQqrNQ$wDORNaYx-ZmvYT!nQ@t z%5@p~qW@W)l)kIaIwh?})7~nANmKw;Y%q^NvWnD;Q%x~mgnV}6C;gIQGsJU3lSWnb z%J#`3!41x*-7O-aK#}jI!kCD&7~X{l>vm_L-6yyVE_b59qDvn#kRRw#JY*Ey%n42$ zwSvA`4xvR>5CTF7dm5?`4vVVMR;DOAxXD0?08o-rQc;zOQWCH-MwdY8PfTg8NJZ_m z<#4w}C7}%JsnROMGD!HMc-#`Q?WiGH52h8X;icR#UCBkT4U%z{E?nNgP5EEr`OD1y zn*@Nxd6@dTLXCry_h07SyO5odgBn@MjE-WUJO5FxjT2HKA!Ss%Pa(mSNH=cKS)94l 
zbd3hna(G76X|Cj+@5vQkNOB;<2uEGsE9f-7`|k97ac zgHsUs7xqbkqcng;LKKD*L|#j;6Hacn$^U7UA4V9l`BM<}5DQ=Q?*oQG=x`weY}?*c z_y9@D2*45@!*HzvT5Yrqom7rq; zmDu4V9@3J)Sww(4P3@JaLy|NVLT^@v+dkYbXjFs=NxF>s9^z~}cnohts7L7t578u+ zoof3$4Q^)3E6CmkczYoNw#pOf6&lQ2wQ2f5I%ZIc;UD`xi~4*t*t6HMWDEg50T!-b zcOp|v#-Q(bHP#~C4RsuzRy2EiY0Th!LAxB?;98hU~WHm7KKyby$JrGhrOjTY?+b>_E~pWa~DuPNqL(;Y>?o zwp8~r$MF#NG-fnzbP@hDh3)RM~tIUy%A6iB7=8~li;4-w2`smS@L=B#0P5)@4%B!fZQT=Dc^oQ|oqM=-XK(wTj$_iQ>vExX z7SCHkZvXxs9a{xlboX|6lzi^fm-%%Qe_ycY;k&=2ELico+s5>ISRUs2br|km#*z4a zl=SsItpt44hS=%#zHcq`<^Bw>qyOGL7~d@vFb}N8)GqG6_T~DTwmfC*b`$6*9OHie zHeDy3hLyd<{+x&UehgUYdHC%oPtk?yO2d}=zI4>G^J8%IX?4C-l{{hV{oee|OyB-| z>FD#_@B8(Adov6ib%Y_%R zoA;u#^E=QFAH&~^o9KKtr(8>9B?@G?;Tu~WCYQhG{7 zt&Ww_pXrWb1St2&c+IThk4 zYCEhIXtZkOVWq=>gpLCtU9H|CLR~0LFlpz5DwpP z^!g94FM-YEW5tO)LRFwL6wtv>UBWokVSF(-o7>4GEKSY_nJX z7H2Hn=P63ALYXQ~Ur<*U)#6T|O@XN{DTac*p1|ZxoHPqj)hUEUtqa-omZv&tDlSWg z;tGw>00B?0L7Zid7~W*YSQ9EeZJ_HRbh7ZKQArp5==<4>B%)F)Eku&QZ^d+q$i2zeSSlT#XGY#@J757W z>pyl>8(^i*RJ;ByZ;ee>SenK{wYAFulyf;=XwFq+Mb)aDDRvAU1xg!Mr%)$5q&j7# zFjOIs4Tx0^KqBO6N3aL!bbZdx6DOG)_ zp+r}PU07Y>f*GMwZ<)L;D{EH9gHv+m(i@~@(atYJRdy?Iw_0FuQA0B-)6UR(uC7p6 z!!c?|JtnnV2lAh?VAi5(r%oterbJ~jqb@TpIffaxUdhP~7`3RR=2fEmi^bX=FQ(Yh zxZ=uNG?DB{q9SfZWt1$H=}Q6~;+i5w)o-~dq9UbQtzz1I(0_4|%$5}Bu#}N(aj{;} zSb_OeSUqYw>FKi~&Q;ysQbc-IG~mBRw{#gn}9@6;!j@RjyPaXphV{n@%;Ki?-rRh zXA?lrOE5q;M#~8COd6i)wYR+=KH2AyuOl!`qIRr7953V}As>H%7M^Rnv>6jjK}1#* zeu#=n`4z9VheGntQ}I7EERT$v;K?Qd4M|bzRwOmlS9g($40JacS3DTCG0F#edBe~n z9W_S0F_ubFGv4#D-JeTTqUR_Aq|m@aj_3eLnm!70yzj2K5i4FYIYfg#?%)T*DD#8SCt7ik5*#=h%zQ-Hu0@$p z;whr|2VZ%PHhT#4&rUizIQI;-G_|7?Ix40?n{s@F5hu!vvA)mKa zrz`d0IJ#X0Mu=S;{kBkH8cH2RJ(yjPw8RzM9fJ%_E5L?B$>1S?`3&a3ga~!E&@gH8 z9cg*+m4f>N2gzyDcoJ^hBaxeUB=kN=9>O6|oDGb0Wo9 zwmX+VhW;pBYYmVo=OUqd)S{_?s?c0Vl5GONzmFu>fFt=r3*NytFz~_PZR<$Ohz=X<`#aBRPuik|wh& zq_gGcz9-Me2*SxIOsdKB2gB#5w+Pp=hk+tVX9)P%EN!SG zW|wBmPCt#Mj)JdWH75P3E>$;Fk(&a`m+$jbP1_d z&bbAw+-TR6$EIWx<XrQ3FW< z{D3ewl(GtHtp(Jq|A-{f_OXsOOu=g%wFXVHB&DcKa<^GRVh)#Pq5{7O#%s~ELxtC; 
z;@cWyh9w9?Wm5>?Zl`>dFVl{1l{2jor-zyM94Ztw$k+C#ZgdvjgqL4eJ75{%54Nm3 z>K_(}pf0XKs4oD1j(xLD@C?EXn>_!~VbXAZu+K3V4QO#_`)OvyyzxjGAY`-Ec;~J> zR1q@S1*>)k(%LmB$j$*>P0b^e5vJt!*70WeE!(hX4Xs%R4rf!0HpTii+?7_SwN)+< zY#;+>?)w#}{CYV4q$intKA^Ja`kvI8vqVpvrfa0jvF}IW-<3ve4ERmK1#A@2%BKS0 zA)1*!;K+K!zt=?KT>0*E7-GVTIqE=gfomiXpqeBWkiqOj_Q6Ef=!12@fvW)u)s<4# zRXw2cj)O|1Z`@j~N~9a5&G*`65HS73{d!PlZLIPCuJP@yn&>P<~ijY=I_rbFr; z$P{w^4<``V8j3f+{KwO>$Kii%2Pb*+c_8m!Mb~H{0^wileHf&D3gyWu8RYn^a{Ki$ z{0wcr4f%k4f!ZCcihCfbMuQvxqhBZujsmF@cZiyJ*JSwH*MV(YMb`k8$ptgw(X~YG z2Bbep*dno!Td_mFH|$e0M4zSo3N*liHDwtz}7BI_N- zL(#p~Fn0f)xr1VIm<+51Ix(`xC?X zi?BX7(Uq(+I~}Wh`~rfmDk9rt6#q^2e$m~62X#`on@n?KJjFTSrBU7(emY-wJO}IB zxU*=Zl4)2&9paq}z*n5;T7zWJy|13DDZ|AQcG5cHFmYfy!?VPfh&6{^VBgVHe=Tqz zb|6q?G~^mj>+bm+EGL6bD-R3_x3|kC zM)nlhr`L^Xet66|;DyT>+#1gds>>0o%lH@$V>GuRhTA{Csoxv)v7xBFOw~I1n-}{M z)F!w7QQP*C_GPWmy~E!{%O=O?W%s%jFTj@;M68i4H-o4@ERHV`AAdVSXy;S)*2{4h z|McW`{bP{*_M8Wc!pa zB$soS=QHe$e^=+{MaS2Xzv-W_hfkY~zoAw4JEep3Hjtu3p80Xh&(U@8HNB~JxvR_Z z(er^^J9gXtH9Q8-Yg(-9E$>s$m&M)j*aXYV?T0CeP26W?<=51Ec!hl+|2R;4>%8Lp zwYH5(HeYm`y?Z|gM&GaX3)qB4H?Dsv5J$vna@?7`*vwL{ol?%zM z@;z9_AJy|3qqbh5rW-})?f6=n`Hf$k+gZ`zeA@v%i|_T(5&KnkF#!GW=W2V^xAFch zWasvyrD(R-Tr|>RD=Y3whg1L66ve!cm!G?LsY%P)7s5Pl z`>;kJi7oEbVQ_R$W83D@-PN|0(fvit`rUJr?qn4Rc7r+8tYtQ~VEF{yjnmxduGWqu z^~ER1D0g+;@yqwknwwRC+Gd9r;5TCI=hRW%s#KD64zWld7YfJW_BvtZ`#R5W#k1qN zzo#R~KJuVXpa1~Y|Kw5P|1}+PvSs`q?wYEdHHrvI-u8Oew(hO| z?MuA?pjs`EqF3808X}{^hUSbVC54D^J)T#a({-y?my287AUJR+LAe6s&(QFnKlAwt zh#}oK}f(*hA6B^(kz6GqD71!C+U{__7k@+(_VHv|B?||TicOJtp^B7RX4lU z$=bOpA`+fZ1kLLAX5$YLX8XsHMm5|v+>5ELcsh20bTY zld#NMPr^wyRI7q(kQg)#21PPIificXu+Y$LDT@eSgw4$B>`co+9Hk?|ZL zx1u8GeB1y|fQ*8uN{=prd8Jh#WfFh}8X_Q4L*Xb(aEfzY;sfLHKpMg%F?&oBMkdI>wdCo?8-1a&1t}&(N}%Kj%&ptPmm!k`bFua%7;Am2V8Fy z5Nq&jmJKXXQ>_OnabZkgH{=)VV?=^kGKJP}iU_C9v_HS#^S0nnXxSlaOTo;|t-~1Ss zcedUBB)Q>mG+xe|*{xw%>%I*x>)~g>3y}-Pk}4ymbqs&$&C(lsKC$6ka>-SuT(FUMM2qZ^Ej9s5JhsLD0&$k(#egH-ec$?-}q>qu_^ zYSpOxP(AL&^*ZVLZ%`GO+GD=6RBuxkooa5rFF^B{d@*wuOO_cQWpkA?iDsbDcMK+L 
z*5BlHgVt425Z!6|MaE{9Ha7p54yflEZOoe@ZGjxgTHmx2puX&`alKF)?Xn`B5&~nh zU%mfM8uokbQ~3Y9G@O6GX8y0FVQAoF^501#1jE7_$INAP*WImqy$;uFr6H&FoTZ6V zbL?g9Uz~IFI8)1>mLszwu^9D?*EF-?Wn#)67OcGJh@n|oF7z#o2&h638!2uB7yu+F z4>!Lc2@zlJj}RdQ8W<=Lj;64a^}S^)OhvSl>(=+`)8!-<3kxmeMuC-=lUJiQ z6(|5Bh=8U%He;IY%yzhOuHB+dooXG%5hJPPvTVj(B5k7Kc&%8lZIqOD?de&P>9YCg z+S;%+&!bgIhwT5NIA7B>+#=WZ{3_*AW-#PiyWc*->?$^0vC{nwhvGn{WVx1=S%&>= zI01_ind03EFu9%OSa!8#*_rHcdG5%1Tk%)5inOCMvzFbG)GO(lL!~UD&FjsyO+9oZ z^!vO?wp6t#=S8UWXu~&Ilgjiwbl9-K{Pg?A64Cu3>jD#;l^`klmnx(2dKMk`E18ih zT%3B@uw<%XYMD7V)v+b9*5smWL&v_d_UCrAK5r9gtA1kUNVWyFs#j>4((FR9L9x8m ztWl**mYqYE{VJ3em1!hVxnXwMM0WOb%W`NUpz2ujyBmeu@ink6P)M@3Tc`2zMaHg6 z$3@j9jE%LpXS|*Lad|yjiBdInyB3xt$AL4d&N{8B6`96t#_YP;Xtke0uOTxu{T<3I z$8h_FHNrPO#1=%0^$og2y2n<{{L!84(y(87+-=5HXKsmxHoQFV)+P{#rC%FnU>f5y z^_dpA2erB-(IH;6;W-~TRbp%ZE>M)5EMXg;8A>DwZOf@KdbpY*AkaSH` za4Ay~1N*R}L24ue_2@X!;-ha2rIcj%p|8*ZuG@Z}AW?wIRp!hO*hf-+5~7cehEY_# zwxI{=Q<{G;b^rng`FjgiR631mL;Vbz(pggA%JF{j#6@wAjUMvqMki1f?7fgY0|8P~ zWWiqxOG6UDbjeN-m2QYnUWt3fc+w0~Mr0v0Q;?$8IK-$BXEd-h83>dnqRUVsMoUWR z4CbXj`Rs(O#}t+Bphr)nBWW?GH5Qlec&^uO0kq19bzn*97)ZcuD78iQ{O+JTqXaKj z+`oi~OGpYD7~X_o(Y3`?y)!^9TFR>XmKVwr$(CZQHhO+jhOOT{SN{W-;@3cSNpo zk*nOCd%kln`05ODbJ#S}POzMdYK!7`^Vp&WRMR!Es=|$iJZ*#{-Q6t!st>}iP^Lgz zJZ7gza+|yFHc%Fq!Q(1qXADRNgOoT3aF7djD9n@0;?_9IN;n#6AQoxlt{jU?@P)AA zIid)1zOf1f|DZpmk4NY(;ba?TKd(&rDllTPme~1qVRy>}ISt4rJkILC z!Op%tyI@-tfM-%SPdYPaiEJIGayY+qvfIB)P+_+dF&Y@l zvIVN^8W6CvntuE)B;G4{=((-JE~wZu1prMY6&ba(db;K^N_BY!eHmdOZ>5{+tN^y6VIIOY|#U=P9I-AViGySG`TY+dpVfs$3;FWeOE&+k`*N|D!c%) zWIjJbFh>mkC+keW=<3(z<(dqe2f{Mh5n86JfR0@6)O&%OvSn$RZ8`RzoPVtRjC@un zTvL)3&Vz6m&w@OlDBTX>@@Hg?K(WhXNGY*_|3tqaS7>cPolQl$Oeo+om zG1+m5iXbtu{!}~QSbj!nw`nFuK~f?2K?oe&ZfgAQ(fTpi)Vx=tG=-Wz+$FonhQgv`HeNPfFB*tG5LA*gPXtNRKb_t-No%Yd=_Oi6t$OmcspFQXXY~YGdd&$-d zUeUNn#%-AZ2ZGr~R%G&2a*z`osyy(I2-}K5Kq~%7CVyw=j)8shO5mfS0 zj(@iK*2^3uNUznc3%wDJ56aVT4X}8O=Z6Zt&53tz4Itxb2((0(k@N|s%eawXVmxrL z1mVOx{0FEHg9r&G!}fG;A1(9Q%IT|Fl?Efnwpnd5?BH5J!^dH$<3Lcob*PV`1Xqt~ 
z2}l|A;=_xi$E5J;V1i7A-!bN21%#E7p3-~8%Emm?1<+YM1(VSSNRUl4P*+&idEp+f z+9B(5Nii%8Ff8ovMfN)l(MX)}w}pyJ$74f7ajJgdH&;JOW%2K}T*APu>ba z-IX|G0xBeViD2rQ25^L1n+BfdbIVvIni2+Uz!)G2rUxQOt|L&``Sa6Ld&_1EV-j9z zxr_AkH{2VLdXTi^%-nUB0s!lG|f~+C%q#cob`%MMg&9$fybfqMfU2z#r7@J`<7 z{W;m-C$COi3%L+w#Jm&t7XCamPdzkGTw3SRQ(_qlXgz3S3uA9CZ=tvUZas#N&ggRo zCGfg1_T-#!e(?a#4;)0lvpljn+qEx7-X+Abju~3u!((^R-^I&PS!lUYVBP~uG zRoi{@?!-59YUt@nQL1<`vHezU6SQ zJ&5n7Uy-EtJh&e!=vd);AFj?<)Tqv6{XYK;(5`sjS7hw@Jw9x}!^ffG<-9*muKOI! zj`e+{{n6|B#h&{%ys5+Ad(z%7>GL&rbM^i>XwsX|{QZ#rc^Ge<`@TvW6 z_7|dQY&O@wU)2dCN;kJ?^$qWo!3hA)vD;avTI(ol-|WDvgMREDrk|QGils4bNeRG+ zdrS<_v^|>`KnX{T=O}yZkp19cls)!Alp~g>A?s*eh>IikyDjL*2Vkp%M?H*lOb9^B z!NE+i0;V>xevB`{&#_Hrn*`koDy`M ziQ5Lzi}QP7Zh(%+FrpUJck)^C^5 zw2KL6lj-U3Gr>o<1{DME^LA+PFsRlf{7IF^7la=}j^poc_G8YD0_?A04s))Ol#Obw zv7fL1D0mXg;mExHSMc-&3;=-pKMm!KtW6AT|1a#Wq9kFr#E*hUr_tFZ0aXc-xB}In zJahQ3n1n+IHIr5noi+vM58j_8c2|~GDQFyZ*=8JhjueqVXhrz!H{fzbcyS0q%3)B# z*#qH4{QhVwVXKQrug9KS8seF(?5Q$<{aDdIj7*qgm4E=d2+nT$>dFm@jQN&=ICs`r zuWHW5{9QY7TBWg*+L)Yxz#Fl+i!?ARiXmd9w)d#!)eYlNNR6eOHY`$3u;uge2!=u| zQ#vFH90G2zo-{~{`8SzC69NbA(AVYdaEw;~wG`@qfMYeVNKWAb0Al+X1+WGD5!c1@ z#=~<0|2ZR#D6pgf)AuF>j|=tW5YmY;+$fkH1T)t##YH|3DB+VzKx|Zz)7;n_;L-xI z_DzC`8pMABI_R7fB*PgWlVg}cPM@pk=K$;X_eD0@wX}*@#SlpLFJM5j{ooMmL27=y zmv0HqN%Upks{PVzC*`c&S;D?OuX|aqKKtu%Umo4}Jky zOi>vFt3 zdAYbyTP2?LYu`U{uY^^zfNPY>MAUEpsgo3v{Oie?BKw0Jk~W5x ze06oKjoO@?KrDIh{N-nF>#z&|@V}*Xf%>E1NjZycp)>1?Dn_I)5i?#bmJY)SH?@Z&++A5LI zy(*4K^6h4K*nT2*NGhc&$vIl5tH9ox^O=)fvfb%)5f{o_FJ_Pt6jG)VHy1ZA{EJUN z3T8wAqWDUP05m`rittMygG^7tf9RH1hifMtE?IV)?KJnj!O^$anA`wc>tz+!AE_WwhcaCv9grL?MbmO;Nvc=x zV@>WwamJ{|lR>&v`Bqhu-H=U($q`8Cx4}3S(yi0*O>Ix3RF0)PL}@HZg*VhFEoEqn zwjYf?ZC$$0ZweKn`cpmA7-6H_%~7PU91WmME>fgyKq)ulGoanb#!&4%B7G)>YBW+W z&d_u-d(wm&r^7%q6f^BO(4Es7ZN1X?XccP4p;Qm+Hw0H`+@)Dwoc0JRWcZuHC^BSM ztrne_!zNXmi--S+_8u6`4#!otmARMB}@*PukN1~EKi=!2uw$YdqDNnXQ2 zdU&D1O1bd}I8|@eifbI%lSx1$fkq~rMJ)QmP{$aQ{> zCkAf{Em}JKio$#%rCVei$P*{ju1&|GG+6>$f1Q(;o+X>}B>V4o%KYhZU7bv$vPo57 
zPVk+P3_^%&YT%txXQJLRuTW20!kK6>mRdIfrw)&mpbL2MwitP#&I-{c+^5cBokHyj ziCKM_dttz(mVenMPta%?j5Mv)q_1z8Iwe!JO~Sp>Y5WBDOsguIX|@{vV&R@J$KY?KAt0n%BKV8av13BIFe0FR4$VDYG|CLx{}D5l!T zfg#1l3iGO*Jo0pZ_|-FN^oU6& zN;T1uML?I3CVDTHCQ*@zSTlq+b`D6kswzKVUnq1)%nVN&dCvtUMOsFG_GsfpTrAr0 zOs~;$PK7XvKyMItORN{+k1Y(t6?DGGRQq&ez8Ae>#q7Bz09MF3@4$`umpKy5SYW<4 z;i*W()IisBYXLWw@t&&k5%R|O%p&u}C}PA6v7Rn*WJXd}>O@-9iR2Y{s^6+vL^Z=6 z^b8lzER9Um22a`aUxt(Oil+2e#34s16ZI-ba(xJ=WGdHF#Bvf$M_v@RaEi7a$flW@ zvKx5Ngo<9H?0?ALDWjdF@Q{pQ1Vd6lTJ1vh0q95$Xe~`ze zJR&zTi*^NUsKRW3ej2X;W8-~3!RWyjuvGd~OnIiWNJCZV#729Eu;5Dd3=UUR3##+# zUz4pyUj=p(QXZ*V7%E(bbwl5NZLMS^*7NNN+Z9X1G9R+ws0u@I-oq^R&dXg!argPF zbyYukYvl}&N$e~K4scQ1Pur=L>^e~y579B`si@YUgSRf@y%{zxo=A#Wb!D0y3=y_H z#Mq4w`4s4%i?I{6`LYT*FVMe>1(-MS7jpLRu4qUP|6gEhz33OrJbBO`)tt z2hVMDS~Qu5ZBbKVEu4cLSP$)4L1tef77>o7XV|4ETn2%<_?0laTROG`%ijTT(Mo*u zWs^fU9QYYAeThUdA{s<_ip0qSgGUXW`H(jEvgd!ukTyju{EQX>rLlxrz_Cfv89X6K za4M7R%LN2OlkCldKya7B;wRmsM;lDLdqbku2j70z2cvK(cg2l?#6RP|jiY4&Eh=-r zi5l~OasqX!`Z=H=g-J5w1PfhJK$!gw>D4nV_sKA68M$$a#27BkokGR}8qvn*R(Ux}&1lS zb1l>$~#pEiEqIgyc{f6gyXVZo+ zPT+Y`;$R2`HHMB9Pc>5h0s4gG;KxTO0fhmmo{34Q@($hsKDT+M`E#iJP4kxwDri0_ zs@)>~6ZnImsN4o&<-Peqm@Q!=ONVo23tq(+P8*m19J8{L(kl(o?HZ1f3L724mY6y> z)oy>LkOUa~-cmt(G8s!2431QSsJi}gwtVL!kDS%{qcvp`pSK^7jEiBtG+f)&+mn-+U#3Hoo&o+*AsY^ zrVsWq=ALc$8(_8ThxZ%URlycf_yXs04?Llt1W9}?meeuch&iffob^^D_4?l#IXrTf zNJjHA54{elTFuAQSVKC%YVv%*`s4*XPOP?UcP_k2iR#L9kqe?U(@cGKfrBJLuvylH zqXL=|jOWCVnSCa)K=Nv$CAP(yOF_&87cpSIc(>Q*`|t)k319TkSP@`ICDirS3s>}w zr%f8Y1C{0rEL1ZCvB)Av)yE6bsLmiwAJfldX@YrVpN?W#3giG@THV6jbXBONhfl0+ zq&`qC!7XeCGlRm^ja*1o?39$i9h@41Uu6bf?A>3^h0xcjEm!7Ye$3>M=Jv2VdVNWdCYQSfw*=aohY zcX5ezCo%Vw#*+$u$`F?G*^S;>w?#P?F4!An;4PDv`#q_*RN1ISY7)XFIt#@J-#aR?2O{37g>=I7GgLo5QN}3msGKnEly^$H+?y zgmL<2H^$d{q+iSMLt-5MK8hEJ^*}&6U@(dYtZj0L8Kc+3e{C|eL-y0*4t3~Gy0nJ> z>oeeutGx#W>5y~n#x-U<$9iM-P3V;KECam;oXr<{Yje0b!;rqmqC(TadwuL08K-mD z5NO8x)MfMz71a?FzyAR}QU>4rBMvS5?5PyTLl)cj?tC*=_Z!*0ee$DTnd{+T`O#6w 
z=W;VS($uD;1Kii~xJwhs{pvgNT$T|ehm)ff!)=zu=l5)P=KA_k_a`!UTJI*?_s6x= z=}mU)P49&7ly-aNW!cKkPU`04@Jj9WU&P?q=dPvzzni#s@i|H_&WFkVW8hEUS5q68 zZf5r9;@I!HQ`GLqi5HJqEw%4$i(1~>%kjIL-uI8)@AJ#r>MwWWXUKMz8|$yv?$?AA zg{yvShDPcy?CczO-7}?e>}49C%BO?wj|gkdCD}3gT2wr-wqA(zV+U0{$QZLbJOj^+ zSNMe)A-MY&W-py|9)6uNb?fLe&||jS2}q-9S~_8>@j=+!&m+9wtKVyio8y?>&6kD2 z0T8q~!|GJ2ii8?%8tk8Vj`ukw%!5Ht1H;Z{m{&aTr5RBl(~d+oDOM+OjsVU+wvubh zhV%wnuB91E(zG^J(6)wTI)=D(Jyh>^#a8(lOb&yivTDjO7=R!i#`}f&juAJd&KubUC>>>6u!npA>JAm?zk08<^Q#>3f0K?SB1jisdn1 zX@SkEK(l(gc02#~?G*0?_FJ7nmXFV9wxtA%IbkNmGLWrmO${TJ`44n!=_@_fYHh#O zY0U3d1@#*@->AQBHP(KQa^60nD((HOmJ_x0G2j0IzJw(M+oJyi@U!Uv0I>c?jcw;> z@?XlqTiwbTdnIS>Ew=hzeGStiIUNBaDSA;6C&8f@feOC8;Ut_5EDFtBAeF^POE`5{ ze6}cgH6Gw}r4+$j$^ue~+-AddDb-c0b4RhYp5`iOXR+z*>d)*(lh>^8(a?jz+=f$U z-)~L=&7b7#Blnx0-%r1vpSa5)wjD;Kh%0s$R)_si;OkLAlEpRr2w~kNAG^}4G)mY# zrL!JQC!G%8>SNXKMZqDRYLA+7?pr#xtQO0X?ON8q4IFhk4}ZhFu)+)p6=24kE9x%T z_}47kKEluKJ6g|2yz4~GdUExs*3LXb!#;r%qOL7#NHbJo+Z8;hSfN=aQ#^pZDRrwr zThP!ZUD{A@%{B6BW@{dM&}>`aR7&Pk4{&V)AN9KEh8FI3runzDE<}Cmy7S>@LH&>ohX0lrNnYHXLhC zBrZuBJd3Z^K7lqhjbabNrnpL4;mh1@m1{q^zy$Wzty@l4Dw%pzI+QIitw3$XaV)4; z^&_+@SGQm}W5Kze!De-8oVeZbcET|u{S!+v9H)}f2nSy&qkOA8*DrG@x=j3N|GxVU z$+_k^Z*$MLH0^^iRI^Z?Xx;|4(I35I+gr?5dHHr+3)oXcSFdhpp}z&Rn%KKg2rs5H zYQpv{pM>|0bY9M=UgmCXX=dlt;K}%cEQpUMBamgl`BN+mv^8(^tveHPUVF}{E*d&46^|4xS1qYv%A3vcOWPZ?N&MLNw)a}IAq+SdtahcBARsPueqQY`n)m% z0BGrA0aBnfqXpY-Ec&{FT{6jC8B(@bAe%rGsLBy3Hz(8YV3QoVQYBPQYcJs<;E6vF zFgBj-a&`rL2mzOqOrCE28~c4Z_^^OjyBj0C7OQM!TXNNfuYml1+d&=_#?yE5{2BE;tD>P22U>d-ls(C9AfUGs zqp22MrYTMTH!7r~FPD9F2M7f>07LYnp8m!>gp?G1B4-qh5u`s|`-hQG<(<$zL zE>ZSR5w#5W)UXP2u1S$r-(()7z#)1_j5@m1)cKNK2&?APJ{Y5WlySBg5;7vwI~8Ch z*ZPu?Ze4b^kzscGEAY#y% z&H#z5612yZr7RG1%fl$%EtX@pf*g7th7Okv;>P`Pjxb;|oGAS;ijr*d$SWkrf zj>nWBeJnxEL6|1ljSS1 z!Vd96Rq6a+(c5|VjeXFy$u)dv3JI z?pWOt%a-zNqF)rX0^tzPDz40#TBne{h65s?jJo+#Wxro&O{&+cROVGP9G2 z)s*xx@`$|CPy|l1I-!<0tZY_fDv8_Bg-9vq7f>*W9)l25=)31BFk}QZE+Uliw&pyq zKfwV!v)5fyBt$P>JbC6|DOFoI${@S!_6ACXgz7kzOs|jT!fKKQ_INX4dh3uvGxQ-M 
zIH&g2os97mnZ=aox?@414NB(_81ek8U_l&+Ue@`8NP9PO8eJ?`EPc|kN|t>fb)<&o z;DJ`en(BlVle>*V^UNI=HIBBEtAxF4#4v&*@ZYjk1a-iG5aWJ_<-@$-Wtt#3lnqPv zQGqGy^DZa|@+Fv=;SAUrSFOm@$SD$`;t<}>!*(0{?N-|{n}6e)>L`cmPe{KAlLG zC7bteiuBRDPsWE6`gWZa<>qS+UhWofzSVW*Vir~toVf>QtMszC%fhw^r^wV;I>H`p z1GEtbB@`So%F1-f$ou4R{q=u2SvPox@F1k*D*7;`R_oIDx~Y@+bD}jjTv_Al6$FttR?g z803xj7o^27q_R?iB~lLM&H}Nwp+~NDqGV%k8|(Eo3|xtrO)>Wdyn}(8F4~u9S1RNye zwu)XTmgoqH+Ocs&Bt=T9U)D-!9m+&e=h*_zvXHKU)PU&UmP*PGhTY_hwe?%&(#!Iy zosfILTQAN{QOPtMLi~vX2WO6R;x`&(zy_Ghb<2<`+*~v~QoJF)wxIydmf0UdtOvB% zWh)ZCI&4Km7$w47)rMa3Bl1DA62ho94nXz+>1ed*$YrT9N&Io0!>E?6EOH1tXbC1VNeNWNG*^l7MnT>%P60Pv1taaMH>R?&pBl}k-i$`d!_hV zxh(*w#O_2)^tOa1de9sC@ZxvQ?s{r^XnN5t4)4A{Ao>&+UZ6AD)JC&pCIww-Qy0F1 zF9s8#ad|~0Sjm#EC`m9o$5leZrQ3O|3&UsC)dP3g7!%ym%3v#e@`DM0AC6DF2Sw8R za*x3E-vKX-u}z7=Fe$@C+{Z#p@{)kTacrR6xCLTBvX~A`BUvGLlD=pGJ^(b>MG^4B zGS~`2Prw0?nK;bG1@@b96v`1SV_PA|kO{gl``{IsK$p2fc*`Hw;v?3^IhFh~)TlHA zYOyv&$WzqZs5?WCb)ZGtq$xLDioQhA@^ZQR$5k{E7)IJ9X@+GLD0tj> z)c5ku*Xo#8lE_+?33mNm6-Tkfr;;Mp2m|c$ZLSWA!!_f{CoMwvK=5~)6DdU^;H9bf zhfEz+?zwr{Oy65D2thWYN&z}sNJFT*BDOgsPIq7)ZA0>GF;7HX{Wmgj`Y=JSOt&Jr zfLNjnLQBdqO%ne}5l3&RkL}Hz&bC4gj94Kd!AkupIJOX-W|&MeliM!Pkk0fIHYNpj z8T#%N5~W-K^DYKrj?7*ddq{1H!Z zN7Fx>ykj}M3)}JZaF7lKAy%Lhmn7T#Jm7B}RJ#B=P1Skbz3D5W&!r zPTvhW+Z%+aGjT3Muc4vV#LiC>ZeTkMLF_@kion`nOA+|0aIkZjIqZV$?7aEFU+*%=;}Xl(5NiXrpK7>hadKY>y=u!mmq*81_{F%X57wVVje&ItuogmQM8nE|57-Tom@+rETl&t>`gv-ZOnhA7WnB|@AON~({}*8 zhp|J_5X59@bB^T$$-@8J`cA0S2mdQ1akqoio!dNx`$K>l4Q^840}%4kUdY2A!Wt!- zSxf=Ecq`_KSBCnc$|`I8j#Lqp7`X8uKq&;Du!kRNE5)vdcM{k1R?OZXpJ^Hr=`8}} zuw9BZ9_XgrYjN^sH2)lNW9%N7cmSCJ6ZiI|AmT+*-Q~fd4JSJY9w&KLVBAE$2o-YF zU_gIg;gsWa=bp+pJ=pjc{Z4#2m%yeEMr0zsE}v8B46uEg0%`yS)oar09Rfm%F8BMP zqK1R&njrueSp5rM-(eOCZ=Zi!;*KYFPjBWviT=v?mEUV|*X%J#Xz)Nz^UFfS3(2lb z)(oSm#uy+8F^o@0!9!H^ghKcvxsb<1T5KG&LNF0V0ortVUk?;Q7SXptg?ol(3ewDE za8S53EYT&tv@%COi$QiM!d1Ypyr1j}?=wGz;v62G-z=;yi`sLX={T!yAXKdWtZ>-hkVX<;5OxJsYIarn4X(HC=l z0H8h;OP9#HxrDX!28j(f1u})yx_iL2(N_qe!Mz>F*H+|9jf!yJ{5$rz?@xE(KggGU 
zv|8Fez2GO`d`<~r2tpB=2;7bx;^JqA7rw0qSN&t``0^=$01$*gHip{ zNV^fKap*VmnpJuD(zqvVskH6MdrNABJUvtp$=5 zri!kydBmy%pP`&s4EUNRvUQX}zv!%V^ak&?-qy@PYzxABItK(s1=_#hhI~+quR)=& zS+jZ1_D{D|5rdp_vwfkCasnNBQgT(tcuexN0*g4^CQg29or&S~AgsNGNI(kYzf z)Z)*2{s$EPCoCL^Rf6c}IdW6G9MgUi&VVCyFwHUju{b*-&}lQ5X*r-RlYM|iX8q@C z_YfJ+R?EqLQRJI@hM!jJKP>lYO1M`$z|Mf0egM{bWAAKtpKMM*(!2eCHL8DN!TbYT zCt&cvy_~*iKz?aXoy$sx_6q<0q*mYbJ-=r~zGw-tlZ(TgDhzar%cAeUn#K==<2RnV zt-D9jf4#^1`$I6k!x!4Hezz znS-N!F@d6FrMjjR_C(E~@1OVuJW@_+Uqzc3 z5-C*}N#23V(g)tECe`vX@*FuO(y9WrjaTdXQNzpH0Z;&es$5)0VcX_5PdnBr+o_jn zZ@XjabE7mCL(LRc^KTrlC364smLs0Nu(6ZQtF2$$^q z4eOLRwC;j{g2g+f5!f%}w30-Xv;tEup>BC$8}(JGmfU|<(&NXH$1mf!4s&vTTZOz4 zmt+;|&58y|V1CZgrRCw=%JYXZ;s+VX&A;;!tT*-^ZRc+?7HlxNvv&Ki`8E(q45u0a zR_#>N^2Ix!M5^bwBFKpy##<19GXKx+}duSwYXh*aR5U3iv{#~D%B8J-ukqhj$zUJB)zzi(oKIIHqv75T_S zC>CJgbnYTfa!=mK7Q;&9_qfc583BH?cZDEIRR*LfoRUWc5sDTV=QeX~$3suNNo}tk zxqgz0J^sq11V1GKzgnzZ^w>@=fGEuL{*EknsLXdPGd+%TlTzeASgEpFj4=v!)_#}o z=)ei$?2{tAG%je*vqf2*o?+=S3_I z21XtThaz*(2s7O!D!d?)P)54+tn_ca8axzYhTOQVL6JRf@^rf+ zuKuBz5~QB^J4HT>gLG8jP!SeON2JZ6cv4s|pl&LP!wiSUZv1au(vba`coON_scVIN z7qg}I#&WGqF%D)Xm0}_tlTxyF34da`5W}dn7&oYz^@nsjlWD#XKrR5CKR7?6c}zBV zcThJhfBv?b7C%>W72q`>^GA0SFD_tC{3ihC)K?e5E#)KOi!A_H2iiM00N*jmZ`k=Q z{CS8AFhBq!Y(b-@XLC}+lYs2iH~k+>DU1(z02pC>LwGSS5`B6{#@d3$FXDLQU-pb2 z+4QhCY?GgR^FJJXssHSxqojB_;PiMc00{uq)v#0!$<&iIqBg)Qj7t#X$p^Cn_z6Dj z>*72oW0$6cB<@F(251wXPvOxCxhYD$@scz)o62@Qcj8oLchHBv-QcJ6dnIsFV zhNR~vv}Or!Rsf z*YG#78A7j_7PyIR8__+d@rh?_{t$;;cQ-=LDN#c;rJHY!zpYEU=|L~l@;ITN0&TRN zq_L{TJ=CF}yiZ=?ofNAmva3h7PhCNo~&x|qy8IVQZ zUz}8i-jAF{``wP0+u3@zc)0d0cIUx$x6|qK-d?`CYcg$Es-Jo8Wo3_jRasF- z=;&;u&AYj}j7$n0wG`amGQuYM8(!}d&mla}LrBPh;Jl1(e^a;H9Ro?J-EPjfs$MIr zhn&RzIMXz-J^kKo7yE*_Y`iRe?ejF@!%*~XpLnbj|K9oGpEN0ndEsf7@H&R`qWhrA zp1p;_FS1g%KR)yWk2Nw4h`hdk0Xk7W*n(;6ZSvfWZro?}I$D~A@Q`EWY+m+PTS)M1 z(sk9__?0#$&-JQ%b?l#)!TUU}KJ!%J7;riG7`@o_^{MfdP=0>JCz-^IX{y1KcRKnO z&xuQoRO+EOVXJQAx9T&t(3(8SDRE0#AJFk~@Ri;uz7Tm??%w=FZ<>kU`q5ZFR!{zQ 
z^S+9~C|aSjBlN1XI5_`W|7Z!x>dI>U;c27aTko*da46VIJ=3|Y?R~L9i8*PQor}%W z+SBBjaF}e9xlV}SbLG93p6YQ|Y$6*m(%$lc|JIyAehJ2}?5;nTmW;R+CYyC#YO5@^ zU9U~}l@qV?#d1_{^jr8L^Ukk6MIMv2rTaB{r(-enS)Je46$sH4)=BZk1Y9|*#p7*dY(RWR~X?fvx(2f2ou6R54xP|d3shYUoieKn{6c^aex4wfP z#i3Qi;b?U>Ei@C6`P$STJ*1F6|;OuBHpn)7`fBB>%;dBavx7?gk(SIbKKnfjAP>Lp<{Fb zbHr?>73G|N!^{Q5jB6~n!~AoEyFiR4JbM0l!Zij?DVd*1i%~| zTS^sddo?J>$KNo=$6c%;j(HKL$HHgM{6L#!fMoeO-S~0Y-dbY?z7OBHQQk0tZ!o~o zqF&7UD2@;C^f>&hntuE+`~n@nq(t+wv+n}te}4bq8&JQ8T|{Mj7YWtFA9EoFXJ^mw zG}v7aGBHW`c~VOd7s&U;m`Zh z`lY^6T`@7$xMn3`urrNl;lOnnRWecW>>Q7i!RfwKOE;|~NH(NisA4zFuPRF#w2+pX zWYCyas-(_XOEhj$n{vp7{)%EzPlh)%41vqV#Ni^QNVY&OfL^-z2nv>gVS9qVuC}l{ z91+DR8HtI;NtS5N)SE^U2J`^$b>pDohtX{?419#6r2W-E3qv+8Q_TJ7?+q9j9yQfm zqM6X8?~BArN~;K}$e~z9Im8NF$~_TpE>;W>Fv>x|NH%sz4??;&m5N_VOhq}amuBM# z%#Iai%gQu&QwO4nZAPK=?o9eZR`><@|09v@E*)or2mk;C|GdP1J1hU&PS$^ENLLfb z|EFX$Mg!Uzc`YON{+uC|h5lhYoh`_OUVjQpYH*#qj@0_#&N1EtjCyKOJT-w-B+-U| zAA(tn)dp)eajcY-#wIg9sW`Y~r#OCx$GUC9hAoRGtNFq;YpLF*z((9tys5YV_Rr_e z0t@+^Ql68`?bmDH@9*~0nDN5ng*ll8F81=HXQezxV#H;HvS?fu z^oV&cxgtfzdA%*xWGQ2*wd~w_()*P-ZQ^Q?1CO2xka2$-kQ@msT3DiRYh!c5EEyN zgk>CF8;md@tp*Q<$*{Nr9SU(og0zZ+U@#UL^&@qYLaM+JAhP77(Z&G|H)`GV$+I&W zwu2X07N9&v$|OluzFD>i&V12Og0p2m5;o4n6}9+6fFJ{Y%z#@z3q__ZDH9^dB0}|; z0PX~ehuexeuSzk(;RkI~L+vynzkluwxrtY}RrKO?gf(YwYjLCUN^qjnC{I6KeTx3l z`DT&$q0Y5;Gm%Ls>*+`Gx+A;{O@3o-otZ} zY8toS((UUrmEMl3Kz@`(o?qF2wmD@_=G2sb1&-*<6Oju^&1mj*XQUYYLW0?|QBArk z5wl=K$O_qO8du3VK(Y)^#u+Oz(Q#E}M>5f?wo0%J7(%fS*hvY-Yeg$WImoLt%P5R- z+9r8OiYZlFev>pJ_KC?t+Ckh0Do?Ao0X0&%LZHwgEub1?kgGQl4^ePrlc=u=fpSE@ zk+czrXbrO};-U5oQixkM0#k6%JCtzfC?#ScBroDAPr@b2RSlAoHjCLSBto|;hGb|D zm8UmFQqZmT2^P9 zigm>&>qjef^IZ`-ns}FsoS`BC=#r-u#mn{TE7ihnas>_E4Dib&n9Na32?iHY>X=i| z%lMOnDF5z{R1CA`-E;oM{fWDidz4)AaKMV@mS*2)m|9eP)9370L+;OWfS%5TY(g9F zX159?L)Ie3St~{;*sY9k>{1RZU*3t1BwL}7LaC@t#YtA)lD1%_tJHT4m?#0$&ZgT> z3?cv#fT#>0K!*c4Q7=+Hs{%JiVokt+RpfE#G}lMm^*pE_%?MvsbPV8MQplFc&=IFu zAbiC>mMtT2UJc3xk(aNH-ruo1o7(y^E*_D|)`U#56>pYNEh4olF6 
zNs(!$okBwrk{%QBY9jQ;C4C_@Yafk-B$ir|qcO7$Y+OJ+L!6xKL0xK2)mVwVhs9!# z#AzyyXvPNzHAgBJmEt@T$kOti4-R-Daf%p4MOqOTI6+a)EBGC~bMJIgObI*1wF-O~ ztM5wE=z;X(0&JhAZR(V8+O=6}R_qioCSuvZ7C@UL9VWY4FaS{ZMszO+94yLa{v&2v z_DKq;l&_VG7yyum>ecy~j>+SIsSCd-xYr%M!{bScbtPWr-zvmoq4mp4QwcNK++m#1 zl$=ugvs1NQ;nyc{Oio~DeM~j7$)w-+fsqS*pe6*dQzXRk z0MM|apdI;GM9yI%npI_hPAUYZBh3)=dXk)@wOL0H6>=)d+z!NCa z4YkCFfP#p`l5f}NPZ;`~{w~CFh8)Ftp=ey)E9o@?AoqnJG85MWn-5_ zBp9|I_{8gc&fBfjPOw87229I>W^Z9W=_9bk>K%&v7M=q}yy=8*Gq+9Hd|s4~=dyku z=oAhX{bqg{zym9@2p3S+R;I-NzO*ZY4GY62*jCgW)Mdl~DhbT1h3^vuv)IH*2iI>4eZ!+0{a)frNeV}Sm|oE) zAz^9?WJ(sD1~4=&N`WZKKpNQU%eBcph|LUGr5nqlAP5dC(T7sDW-Bp8P%K8s^Y87! zfgmum z*mZ4hj>xp9UKeROHwQSTKR(px`sobR7|A72MMyzo@$_W%w_f~(bE}OSx!NETaP~WcPSMXd<(OQ`XY8s(^WIcBz_cabS5t6x zwMO^WTKD`AUpoB2n*3}Gp0^oT(=D;|(=R;}oZN1UIQ7p<8Qq#N+8S8J9^{VCE^yZ~ zapsIU2^({o!%%gKGAf^1S?ig3iMYlhOjokNT$(Yl4()@j?eZQJhCwr$(CZQHhO+qU1y&70ih zy`%xKRU(}ZkR&c??>rUK#}WsGD$Tzk|d$kj-9K1&U+*8Mqh*W+&*4Og97O(SB{c317=c>oWcpa~`_otF8 zr##EM*UMwrJ9Apgx4@}s=l4pEtqVt6Pv_%{Yi1R%OxpST9OIX$`_;P-)`q)yhwJ(* zWf$jnnk$*l(^BHOdw8XpuC|NqxsIjvcJKJ#wZS&CBsjOC`j+?c_*e5WxC6Xqx)&YW zrb7KeeXqGkETyL7cg!*N%k{uD)T6am+pBJW1OAd#mg{|Sne3LkyXO12*Mr{c_+ERl zR@VLC)Yp4<(jPC&>HIbsDATp}jr2YC>MS|3RISVP`J?`u zspYoX z$hEa=cj0L!H`O9^<|M<%x0Sx@Gn!!X1dr#p_}9De_#u}xtKO*ewfmKNe%H%R1;0t z_ujHsqyF{%@}NUS&fEPRoTWCKhdFWcH_`MBbR8)X@BHf}CbTBjgY)NfM#WT3m+|%Y zpaw0Mz4>>b$uYBsnYq;B4ez#n@n!RtHm&ZhSoveM*V|*Xc-hnE=ehJGwtMgGqL}5@ zcK727%d~~ZxckIr`}4Yc{I=$Mh7Glw)%Gf8H*`mvfqmI0K`SwOzGrhb`9mk+_!l<{ zL@U;($mYN*doYY4=Sj?hd|>5i3vNW{wUzPXC3Z>GohNx!-hDf1Rnp!2A2-FKrNp~7 z^ZdH+%)n`bPmS?Iu|@27ai=q@8m)mxvzG?l$E78nXR+~-f!j*vHUQL$Cz9&f+r#B+ zC;!>zf*kcR?G&dV=K!4QAd6me3>_XLUCX25wq;^5_4jLM>2kK)IjNVC8To3a@7Jrb z-?rdu#VY2fNiz?GVMb!4_B(!H?9Pt4HK#r_il&`NF)O*^g;qQt0%Huf!h*O^4 z)5|aTe+v*UA)A?t{{)CGjDLB^|5Jc)`j4C0TLaPw*|??VqIW^tTv0yTA|b90*$G@Q zBTlwM2o(ld!j&_KCMZZGMV2nPC{agHjb$&O0Bj124UX4IbSOT(|M{N*G4Bq<*a^G! 
zK8!PFCM*IYcP45wvMp-#kNc~tO3^UKAP1ZG<;r*O=l19B?@#tzLqTCg0qJ+gc(2(y zAxm-|xe20(0%K;%V7?Q5K?Y$3M*&!2y;F|9B#Ea%cYAjOW>hI*ej7`GB%KF+y^&nR zdE#MUiTcD=q9mDMMcfQ=U`16~zq*_qJq?wEV)N6`fde-x>nQTyltrWnD#pSRCdNjV zB-xV-e}E*vG(}(&4jgzAPZ?UYP8QTyQNeSwn2l6fYvUyUHGD2q0y4@_rfyY$ZNkz) z3K3EVhe>uE>cWwO2#NE__{DQ&PBz5UX9#LVi4a7_#oP#zY6r#fB4c6}9R;Cra$zV7 zDhI)RWzA+{C9S}S3=_L#XG_kK2?1s6cwP=h1ZBqfuqyH}t{yVvNFwC3ZU>L98$Z#e zql?(QbU>PvE0Q|_Oe!o1Lm{HdCjsOq$;te(kXceeU;z_4_9tPF)C{5kQ;43TE~vvIgo~V;B*!UDrrh9phfSJPqDlb*Wy66ZE8Q8v?%xAJ|&<4n1 zN9IT!W0~cN@?{ppfR%KQQsAk6Q)N@gSYZdDx20_) z2Lh1ya1s*t8W#!v2sC&Sha8DFWV&QNEE3YRTZ}c=VCsh;BlK^|q(pLW+f4Wh7j!;u zkn}yTk%THCb|KztxXpjVq|4<{B#APbx-JBEiNAZ3h9buW?Yl@eJRpqIXn-cjK|aAz z%+ZST8vG%F-o4qD>Uq8SLEGv4ObZ;Fue5*qlevA;69g#z@I%Ch~^g4mYj1GDBw?ok3xq=hqRbLP$2iEk1q8?k_u&EFD z?JNr9{VDC7FT@==LPqnw>m%ZQOIQs5-i_bvEj@>(uv{M}$KIVM5d1s6F4!O9T)jt) zp?(jD4FEzQ89*~mzhHV@epFTe$yV7zkc??DQrQF|hr~^D)j0v;(Ag856A}<2xNyKm z@U>&{Eb{p(9LgbCt6)4(c`}8f^#N|3BoI-kEM)2fIOyKa9IAl1VKl+cHiLdnSXLY} zPvT4Fv7aQvw9x9n4{_JVB_ZT#RDZO3DAbE0WOgF6{(jpebW9;Ve-ZBE15$sq8)hy@ z4P>HR#5_iR+yO`Ub^6X8M>N14>Fo8!`WpYiS_&& z-qk8O7-W%fzi??quw+5LD+}l})_DnI0g@7H09d(|bw$s2tOaXd1u#t}XCIp^T4D6T zL?#8BT0j5gAlA&uHsc_!u;ma$K~y~=_^`Sj3P)(iChD|%J>hqQPii#yvS}q2CJCjC zCx8as#t7~Z%P%KhfJ7GwKqk?Q6sWWy`PVnNXS651ZxN>to5f^v1ED6-=-6MX&KF|{ zJDhTM3U{C)Ng8$&GOU*aLzdj!*+$6-B&Q@!q$bm-k|ztAYmpU<%=L#Y$^d$AY3)k> zXi^XA%{!>iX}_Ijj|-MHn?~5C@9*%A#@+$<5GNj_!|cM zLpHt(0v!bYXSQ%3+@dMEFoHgX7Kk#LzLnULzb8 z0DJ%xUExQd3=1Wbu$53qh6){e9k=*_$9u_jls4>DAXL_>0w&W(HoN;CIAIOx%WB z8kGPmU_*xx`?SI4Q&#LJ@fkwFd4oj+A1TpVtJVps5sBd$BDWMXw84~90$&}(&4=B_ zSo%6Zx+w_enAukShcEjMnq%##%v-eS2C)lasP17ds~7z-s)4Fp>Rd0>QTX~a1Jx}N zDlLPwWE!A>-C3lZ!Nyh5y?aBotTt%wnc|oUL!W4!RYrx)L^l&X6o%* z;3G0`Q1z)=PP4eo4m8b9Fh*&F*q^!P9t$#WMr#+eCW_r69$u>b(RX(NLx2VhSwYs9 z<^q~u;lCo!;f+4g6gVX(5-V&s`UY`}Mw$bTT~6 zCsB28u|_p$H;C3jPxO6KhM@;f=uEpx-QoDX@zB43TBlmte@dFUfVYjst+JqzO5yzM z@b9H2s91C$`ATSHp$Ex%ATO=s!-&H{bQ}e!3rOR;<6}d>8UT;8d0pWnHF`lccuB%| 
zLyaj9nsSasQTvXHq($xPT71C;e@F}Ef-0I+4ePd_U~)Q?O)YGP#QZF4V0VGRIg~M6 zJ@Q+?cEc6z+zE8|&rvn7>n5lqipGJCY>42?XkT7Tqtbr7_VJMo%P42$^k>|jSj@{B)*Yn8+cl}X_WnAu(L3tGTaMHbWpyDJ`K0#| zYgbZEq;kHdyGKgL-QD`jmy+<_k724tJh#1Cyv^*+NH%TBUB0FD3*YAoB3*B#r+Uib2ngXL4a&#%{<-=D8}t?b#|2DjOek?5ICxiaV*2MS-D zzjJP8PeUYfHQDcbGpVU#!Lz@_+`zOP_m%Spx;-eqFaA^Bw_1unuIg&PhSiTc-Mtqz zT+qL7`wxYD=MY}+c;3T!U6wwKO5G|1Vo)(<#Z0&yPu2Xso{KlPw7#vqUgskT-?^^c zzUOYIVi(<~@7>3`Dsmx8*~*$reaT_3Z#Wv*LncSC5oTTdr+a>*{u-DOQ#Z>OO- zV^jSkjjP$bzJ?bSp&YNBr?cHpneFhu(H$?(mj_tu8hpLSMEjlQNIt7BpSxPGxE-}q z?hW|@91k1SkG35T<9$eL%Va&R@@n;+oGi;i&1TP;7E z;bJ)1)oMJi{YX4SBD319CpkiNS39_z>>Z=q-lsvOygi4>eAiLi_hV+$b3GlaXBxfL zle%h>Zi{4OGu{s)o2%X3?xO+P&P=gY-ZvBJ*~N4{*y*mv<(Fi) z7G3u{6UKPm=*gxjXf19(k6&%@6>ujxiJ0lHXjeT24&JLf4a)K5Emc36Oa869UsB7p zTvC5efyEqiKOVN~KXL#+o8euZ(PrqpUgxrpmHvhJcXwNE_Mbdr32CqLycitI36-KR&f2)`uUkg=d`NDW7}vVF zsF0++!sYV4Z8gWP`f|3nm}gYGtTc}Hn%QO_MsYW;XZ?IXP1%d>Za9N)iCJc#CX71r z+PFL24!Yu6QmnmB4Bb6fOQnR`{HPr29xUDa?jTf_Hzhq^e~i?RelKHz$FvT^!L#Yi zTi#$~6GCLg?IQHz^;Q?qEzTxj!48~llV!ptJ*f5$Xbn1)APQv1x z9y1PtpUPJlj3Ea4__i`@Lm7{-4+$M@LG ziZ7QK);_w+A8&qctmEY`n>=oA7*QDwL)1( zVM>j8o3y1;-v&+P56JtBAyup>$}6Tj3FGFSD7#S9)isF8ic#Iiz9vUt-`86v7tNO3Lcgi0-L-JI)xi^lCXhTR8c1nx%FI1&(&NL z-^#y*f`-OhTlOcwYex=y<|XTUEVoa$&eV4nXFNfvG3zFDnj^;aX6uu?_tf2IcJg@u zNp5P>r{ybeasJ$!^5dvAgX($1 zh~|uzMboD^zji5e`QD;#t5Qak3$q$?aMq<4EkvL(63Nms*~GbFwF+zvTX}h3ir7r6 zR=e6M5N3QAHT%Z-GFmpv|3oUD49P^6HT66C=yND$gSL*nmr#c`!W-3enAH~WYV!)=2KiLz(dgga6 zGQn*<(sUiW6=3&2lQHCG@#-ISOEpev7&?c6SAm;v#OvjDijKW?1fM z{=6rrkX=mJ5E-EQN1n0Z*3bk~aSiMp>)_CHRqLFA>j}YLG24|a*w9gUCm)N}@o>sm zUf31%3Y3r9J{V@<(mG`r>h}%kFzr|osN+X`==Q2ZW^%VJ!F}1t8&@XB2oRUIW=PS? 
zTK^$7I-EY+tXe{_@T>!{jn@WbEI-#Jg@{{+q9gOj_Q%j1DPz0bE_ShjK( zWLJwvDKg(28@*P!Yn-o8{yZJs;83k}6<{y3iVDr1l<_T)P1263Wv0|79#c1E+0h6N zGD&u(WgbTbrb0_EbWtkwZeE60avK5pA$^jYU|n zb=_;NWvSwcPiXw=a_~9!*)wG#Bi5K6lIt|-uw zQnV0@z1hX-0P|eFqMd!?G|xdvuq88WSoz(!#+Qkq3mM1GlrBK9BVX_6WKQm@W};*~ zs@b$X0n@3YI|7_eq2jD^SsvQSc8e~UhB;Ik;L{PDwO$?6nND*gg0MbbNLIc=-KrU# zw_s!^Osy*19zA^XZTnM+WFrIW1!Ss&HwPWbwKRgA*l;t(cvEyhp=mITEtz`!zl|hldeAJpW@*(izP6HOC zX3d4`Xb(g7lPC+EG+eV$-v&(-1-kl#$b|biQZ#O2P;xR6WIhojGIwZ0!86O<3D)e! zmgs?4GM?y98p|_Xbedt`3G;1g@p6qN$zGF8v?|Wzy3FDvl!Z;X#3Kp?t7OSH(ezxO zK4hcS$!d&lGz24mVKg*)Rq|A{;S%U~7&uc@IC}j;?$S+np%k`mXxUm~P-JcTQ+xHM zn{M3(*6!VJ(zHoT#1}Y2SLa@v@6BPoc;cm_e#P#?Dwu?tlD+%(9OE&hDcuAI&(16+?1vdIcaBDG_3Vnl{m2U2D4tq?G0 zRxFhCA-anlA(SiNMF+SG{s)}kK=#cQU<(tdRed4C?>6-4yRh#S_{|5%hQ9#R14Yk4 z<^@GVui}Qdx-tZ?BpP?s9=raH+JV#R1Iq0aa!1>cDHk|e`&gyt+6HXscR)&H#P6>q;xQ)?!LQ#}bwg_UB zr_Y|x4*RQP4JRaHu+RcdU^-u60f$mjDOICVH3hX|xb!WT`zhInJt%?wHm}8l4ZYPJ zksJ<*C>CHrp=&D>RZM-rdHBy`5Jk*Y$8)__aY)NSO?p~AWMy025d87d^3<(jxuESBFu;#JkODf15i?};B_ zCK7zc5q9h>JLCOcAKwKxui-@45fkNl1v*ltKbj{AYxdBsQW*?FClL*l9pd{~ zL=b9=yz&qM|A_ zswbRviU|=U>IEqS9y-}T=Tb2ZSIh)iu`0*m^Q-VG_HHTCm+5>#ehIiKe@xC#D> zQ`X(^)$EFd0}PWZ$w=FVA(&^a><^kKRh*`=F6ogF~EkNQ#`L zHzbFuU*!TM#Y4G{1MF;eF|wS2aFJ-fWTaEQ=k_c6uQjC>Y9qs8!IimwK@IE_ek0r* z0q_WLOj?qTU@-A%<9H)!j0B+UVxsRl*iS~j*E({J(yW65svKI(S#1oEp7v?}>T;qobTCS8Q2HffKVT-L}&p8IPiM%z6+mh4( z2`Hxs(r9E-*l^;v(k?e#YImO`lG#9#S$NAtW;AoN@r)sc$}o2k+JefqJ@v>7b|{n* z%Is)hTEW+
V(ow#@jwXXVng;^O45@r>czvyyvOY2=TXMPhr=$twbQSD>aj;wL z+ayC3wi`<(bTJX-$hla-f;DyC?jrdv(C~cj!PgI!Uf#xXMnyR6MU|O#NN4#|2xOm) zi`UqR0i}#bK;=?Ot^FVYNYO!&$mu(P3@L~M_Y-9Yy>b_dI5SB5N1%CY`}P{uL5@)H z|6+*Fpil)lfRfYHM{w6#dM%TuDzCe5Q7sS$Q;2cGoLwx6QqZmV*;t3w=ygPbHXfA@ zW9I+Gp6jDC0E>-~2DY(?>L>?9i1yFK1j)+>=mG&aW%)@^Xpvw5IGc|;5KM&l92&Pz%4l7;X-`H8!IDsBxSWg z;H`JzfNKfNjE|+|R-UsrhG!Pv=Ca1E+pU0HN3H3Nw&_>?a2JJyB9U{MmP||}LZj`l z{yoWJ(-m%G3R{wXHy4f1&)qf#&eOI+U(T5}k#kks-K*Dn_B9)M`jA=I-sk}9?FX!e zhb`&{)!Ro_g>Yfw#F2Udjv9BjcRw0?xxBMX9-kKcubCgxZV&}s*e!_;vKsRbPa$^Z z>@$f5RIfGZsgZ%iwn{sOOoB08DPlcPPbVA%U_?3mWKJQkx&oP`KLyF#9n{bZcu4#F z^DUOKurplh=sR-Kj5Rrr-cR*}vhMMgN%D_La(mAiUSF+~HZa8rGQ|yRV!4qG{);AY zv!q){X!XxN9+Z~k6k%RwL{T|EN1(VSHQ|29?@KtIKb#uNPhoyOD&*2(pf;0s-$)?jP+=3MvDF79Vp`$dY@WipbkubY@MqIIX0823R~)!&LgHJkC0QJpqI zsSf58(dmMQ>*bobccp+okxo9@KEihK7T+$5%)~ds!C$!Xw%*!vy_68s9x9vcYOcA@ z9dnGiG9l18DH~tRCHK>VL$6RM?uFA{6{GrgR<6D*yguPn`5ep19mr{A`B4%|If<+* zt^D{TSl-RQzK&LCga&sO6P>=WTk=9HEU9WRV!@=o@2K^Ky(~bjCF;42O3HJkSXrf5 zxwT#rHv3=;az{x%H=WmJz&+&u;gxpxU9!H-ls~>7yoeCTWuLl9j5tDWQ(t-KQ2Cnq|8FUWZXXfXNcQOoqLR9kRchZ+DbstaVQ+=N+ z2D{2X`qjs=zjx_jAoE2ykK8NWG!!kY^(A@O+lO57lhk-(oKF zrk)vbV=XczU!LF8^%m)j%~u={Oxys&MDsI%cpB}Dh2ggp4P~i}svr?JH|O>%Po%`= z9II4Zj76(|dFasE_k2k}+u63?u64IzyVtxs80ahhG=ztQHk6BqHk`}yliBr#qxniE z;yKrE^-ga?kVAw)kf`8c^_N4<#I#@vnX^}c7XMDr>92(O%0HV<8LeC8Iw0# zsc^E1tF%l+2+JpqT0El1F~d`5Z_}n#x^09#v0(eJJjlSCk7P>1yf>|g?_97&)H9vdSL1H)mAEHxnY6%7thveUi4@AF!`` zUspr-y%C?XWTx2dlc~y<`eZ(5$`){ikDn*k;EVlV!?FimqzXP?(1_nZ|H&C=vH!~% zo3>(*_MxLgNdM&@+7rfxDU1h^;CB*-0Q?gcfFA*+h%)Tm8m77Va}d(Tn_AHp3Eh?# z@Y%zwpkOCJbK_a5T0Km8>yrp5raODi73)~j-oE>N^Rw+)i0qn#6veG#GjeU$2sJtl zmF1w+R?ZInCyI*_5+$qk$PR^U*fGiB?SQOxf+(p;gmpY1)0_(m4uWK#YBWr+4JiQ0 zVL&MGk4QYbs8y?Ds*Hlc5FBuOC={4Diz4ENI7PE)1XKAqPZRZ2dSizKhh)FGf6)PX zwz5PB`q;J`z28EZ6gd%X>GF|{Pycg;HE6 zy+LVKVw%ccG#vN5zYIcqH}Y%sNp((xDzejbROD{Sze&Lhci4Sg|SPx9gM zkAp11)WUc!i=!|hIPBr<{`~E+N}j$YkXTSY1_L^`H()pQ1L2eaYh-jy$;l8-@^M}I zf+hZ&Vov#^osq1Btx^Jb{UKg0fFn)V_Efn1eR37ns4Y4yS=_j2e0YN)AC6HGGxA8Q 
zG!e^sgdhC!e-rd2g;3z-8Bs2AN%k1IOw`go;uoooEAk{nNKxzxC2$6|<;p4W=EFbRqUL~NkgE{J%9mIPs9zqSSMpBuhyzS7_$zlL8i zfksrOAHGt&`&CU2ZOFs`-OU-?fs&gU!W_A~m7l(+N)Ad5bKnfm+YPav=24@tb6@)t z$zLItEx!>Jz=<7P_O_k(@v7DUZjwh%0LjIQ%7_fwqub-IzwGL5&gyM~0&Boo7kx=# zDI^}31@q73d+sS}UADo1X`{BrOwMt#W0t?N#-Z&$CA>W$`cAllQlKx4@TUMuVHm_K z-GMTvC`cA5$fV5oQ9U*|2QkW))aj!MK+!7Qy^$=D(GoRIig+JDwbA4JZM!H?78JHp zQ3g~of|k-k-UT=cARn5uKX%ZHqUtY@RER3>YQqMk3 z7Z&{sJj@Yybm?^qQf}uzvahl=6oz#M6V(Ap^CWs`aR#C?hf$G7r7R#;77(Hgs$C$- znp8CqMrj(ouu^7^!n|-wUhE~+Jsa0r#EyruBw>3zmb{0|$!{oPA(n~4Y&~MG9AUyt zwZ(sIEzbiyGK9nOP9+R?^L;_RYR7DZkM~CS=A(~)8?fs-Jl-KCFjh1My4Nrl#{j!; zh+pYnt2w16GyVaPs65jlsLqVwckyAhEn?pe4mJh!;0@~zA06kp_bdMZ`1Xb1^n@k zJD9~t0v-Rfl>94J*e}jf7Z6IVz%^Es+dj2`3=yh|2p9jsoOpxc3+4md(M$1lljXK~ zYptwrWA?~uHfi4Cjq|_xnk>?#Y+_Hx=Y-2f;SKY9<>G3~Vvg?iwTkP&9ng zr!POm3IBw?y00+7+nO6?&>l|P58&Tl=&hRmFxMG*7Gy~aydMkVjR{#uxx~Pjr|^In;(abevbzCG8Q89+oRbU+pBVWX!x`M1(i9;hcgcD=y1)XWKJ!t5h;>p=;XSJ zS+^)re@8p`Dd=tN?Bl@G{#YMyTYK6E!D8xF-)bpG5*qtH@Rc$Y8O!))OSfI17c4=eaY$zKT|Z5X^+S?OUZ^aDtuRe~+RmZtK;O89S3g&1^$c21y0Z6rHOlPUH^|H%8fA`ksl!K^4^UVU;h>vDg6`*6k&U?Z zVT`yH*aZ?(Zpn+Dyv{tm!aI`s~fqt2!iMp1t`yhnhj# zFLebN&bi7~$pxPjS&z!!bqi{=+T9tdG}(*4k4}TPQ%u>6K#R0#cn4LZAHx380s3Q8 zW2(|d^T=$JbHbFXQE75MrKypoFgg6$==3G~CK6ZxWz0`y%wvXw^|8(5$J874A$2WTvC0T<4aipE459}Y$k{c0M<~M za|J6|R!TNyvJ)i1tH0Rz4ngx=1I$$b-oF603U4VEuUC7&Os!0VPkB~;T=R)uGklnd za|I3Zxe^{(+!9tXb+B|pK-Omj-S_PSx^4VI1C|@p7+uy9)(QXzCeETr0Foxu1i6n7 zGmA-p;elaL1)&Zfb`wsd2nR2o6(PO_eG$ydRjJR0A!nBN*UbEHp)8m0Sn)UTa(_NB(038&R?F(|4YbU6t0p4OPr|lEm|USRB%~edN1`KrXF5R zt$!Jx$Fk45=DeSUry68cdQbC~K-5-lNsYRE81_UUtLhQcz1z=qT&jv=v#W zcPh$b10T~`u3@>2NDTcACr&rt9CQOD*(z>Lghw6pn-9VehO>pofexKo6Bsr4Y#pwR zDS~Z@H;DjmRu6#>U8Wr&=C?f!@|tP4FQg*i-7oxUYT;R0Az=^W z4yD>(mCzn#SX9LPZY!W>zNHG?F+v~Hf?qaXYTR$D)|0_s9YUk_2qFE!5N!FO^k&lKJ=2jFai7)FzC` zXQCVc@>38J<0tnQ&J5iTxephBofw247*Nc~6wnpWE_v-|j)(1!oDiTb$Q*i~Y@R2C z8xw%-KJ7pzupWRU0G>bico8V}xnj6eGvXTt0H1%%Q6*@1*bUv?(zb{L5I_@Qx`+QL z00Q+M4j`wukl2?fKw=o#U!z8izCr*_Xjc3VmA?6H8z4Du{s2ERz$b+L@c`POSL8b# 
z01Co%V9*bIzy5$*A`zebw9MOWU{6?9A z&P)+jk%IodA-!cz`yIk{*;Zdxa>R@=Es?aRA-D(lLIIH{{F*w%L z%w@CNvDkQFCSzxuV7fR(;|ON7`-|LpDIa_!mz%&R*Iv4f={8B>zO#700=b##v-GU% zny1ow?_Lk*4hDRot)R>k=)APIO_kGk;hniO`W)c3<>1N-*@DaDPOl{X;qqS$rGhWxJ z%13c}miF{phmzHd$`-r&8c{o*^3kQ&z1q%%Z+MAa?4Xd5(PwJv^p?*=ZdS)tewU7o z(QWPEE_#ygErt`3vzgh_Ubx{kbHkIZW|wz|{eE!bFnh6Pg*t2L{-i{$dSTLKbJt4E z(RS*6l=Crit`QC6hM&XJ^Y87!dVF{4P0vz$-IqEIZrU>k;gRFsKm+$`Z`Uch8cljG zi}kHNT5Ehx18aJTx=bnBYMLkXE~Bp~g=n*GcKowXt?|k0ezfeH@N93A@NLBUaFbR2 z5=XW*^Y_0-?!@D!Vh(%nl&15z8t>3pdDsrbb|#CF9RJOSoX}~^@6WzKSK`JHTe|h% ztHZq)+RN18+9Slx;k{{IYFxJY>bdXuSl7$;j#h_MSbvH_njD^L+RT;4M7C-Z>>4?q zMk52wu}zuLyD0aEo^dYPUe7wO@nAg9mhxw}N4Mv=Y1=A|({D846J#)>y)umuwyzsGLD#_9v1{33&YTnP+ zp06aV8yy#}(vexJ*-Zz1rM@1s)H{vSRT|CT#KnvuKww9)WOivj?qSc`!Aa21D;sS{ z7}8khiksij0Rsb}<)AC~iJ&`xk}bJhn>ClB3&|-3ZNQ4v`VlgQ4C!JSWYEv-wdq4%R!DL9SwtH8kE_yO^zgkL4v( z-tn9GjASlL=_MoJnT=jKbbOT;-X6pZ4Xx+rO}u=(yuX?+9d1r+4`)AMu0XohIvmwj z+QT`=Wa^_ohIIkck-)zmk|x499P(q?TMK_3&(frBgeZYdTF)eXHE6Ri=$MzKW-%9(MFjjMcha^17%ReNHpkVBhu-A7iiZ)H7C2 zEzSqkPIk`0|CSH%4ze|WuNM))YkyfL>O6b96zg?dc*~0kiTsZ6RmWACIX>1dVJvOC z_!QR4zOb|Zd|jQy#&`}P<5j)W)X7~tj?O@us-&ORXzp}x@$mY3%pbQnYX6c?$b+=Mm7q(-g0uSU}q){ZI6s{PH$)MXya+Oy}3TyJT}Ab zXEsu5akuBZeD~;Vo@89@>X{1W$Y?60eGLW;MET^Ntv8NVe=Fm-fA6hK+D9ywE7bw# z%9fz@bhE6u4>joL@Cb>Mvfv;3OeFWmCIqUXfZBV@c|1*Qd6capUq`=|Hy5+8 zEEM2yCFq-efe`RNE>Z zGsR|StVb_5yV%~Ej`rbSFmmIV#BD48yN0wtK30CRHL7sSL4)L^*`AcUh18(g9kMYw zMf-*Jol*kAf0*R_5bj;EJvycP6=u7I=e54GH9E)pbq<~a`Qu%qxCclxC!9iY^_%)} z4B+0O>V@$8!qH3h-9kGxA#DMU{du#6Vn1kyqu0*U1%1=#qXUpex^o2JgK&fFLk8Rh zq!;ds-^B=!Zi0qGcslv_s=%jlL7ih>mUF7tx@jlWo0}iIzbQL^6+Za&;ylofT3@_$ zlVf3)1Sp0x7s3P`*_3_DAJ0%GZd!7XA34}|a}7^`lGceN49hu@*?a?vazQoN zueqRlkCHe&JexQ>DP5w!xpseo&F)m`4WcK&Q*CA_j27;7%5|nM8_*A_JV9CaLcN08 znbKL=F8LLzM!oeeVm;VaDqTc(dV9e3C0NhlhL0p-Y65~%O7;<0J2dsP^>ww@w6_5k zd|Kh0DIx?8*0?40m1ViM7+OGa0;P;deK4wc>UiDjrd~{?tR$U3XgE(lS6)PtI0Q3a zAb}t%FJ2)?NRZ*<))Zwu(a4$JnRa$7oQX3w-@IA6bGdn=#(j-}t9n=Q+s5sZQw{A9 
z9TQ&oO5H>37kV`Dl5ojJ+7sbei;m&Y;Tm1ls=G8Z!aQK=zUl&aiMOkw_1N0YdUhhW zZ!rp#9c(+z!F?WrK}a-?(8?|=2yC!t6lNv>@074>kJCf~#5))Io#77lzYTC}1d`Wf zU;qGRkN^OL|2Esf*g)Tb&e*`*hVDO0T_bb-e>fs*J9Df5tq`?V^|rz?#qqh_alE-& zyT0Tqy)ku&Zk&%N;T@G=ZK?2lRH0MK5WWOIFl*h++-YTftoj0 zETs`n4w+L9mc>Z~sHXZFtSHArG(!Xe0=g#<_yiXb01Dsr>}mRAw_x~t^ZMR3`?=v7 zdYUssfUJ#?i^;`!!UY@j1|n#t>=F&SO_L(it?qy+v&i_Dfq4kZOZ_CkX`*R@WSfTS zdJA-$>4wCx)mTh2C4aLB$5>=I#a2_tbiEE{mmJfhe1yxnzlkbFo~gYjDiV9p`)^wI^R$b46Hvx&Mi0uag&ng~YiHQE6`3awsx_LUH2lv2wc zwYQb`<&SlTEYn!nKt_aA$1-C9O{bNz^d`BKv|9>kd(0tIVdH+1RvyNpBUj~`L$B@L zZ`AAwI3H`M(7e)GXl>=3Q46*wI8|N+_CXq1dp{ngvqC7QgmgltjUq zzKD)w?(Yc`ET6DmDojbEC`2;br=GL+re^!ZH~6XTjZJefQ7Ae+B5f=M2^3R8lT2=^ z%WYI$*hHAFL^b9T{s+7jdKnVoK`&&;U6RnI>vq%?kFS60lLX&k5M)Y(uZh#il!8&R zII9i>skP4vy9g_$XNirz(D`vj#W6;S3rURdQ{xt$p5g1a!--pEnV_x*mCuougfmJQ znOn%@KKX$KicHsd*6MG(7?ZCGflPV^GJsSS5tbW~foYQGfhGtw{9oSer#41CZbuj) z2ett>toj;4gq>;hW{49Zw1*qOL=g#@fj8}otHpChT!a>>_>Tmr=dr>Pk4ovuDDWGS zfRb?+*vqM8ne`x?RR$)424F|fg^|WNH#HDYP5p|Cwh6MDfG2QOIVf<*D5WA{0gS_+ z$XpK6z%(X7!i6Ozg%k9*WYYm^wdE5-JGYfb8ij#oaZmhhJl8Aw%OxpfH^689Oh{}) zRw!g(l(?^TAj3ei39fC0;jV?{d}7V#o++rNzetbsdyrC@6d+<|MV0G=l7;H?Aj2;X zbRga>wG1giZGOCXfEsI%P>&4tv8xNfD@lX;6y~k{z+IoB=NI;PMt#$lV9C6lq^*Pc zK_JR+{sD9X5cUHJ3;D73y9VH$k_+*{$P4mWu@pn!g)r0Sk@_$jlcU&9g^lLtu2ed(83Ay=T^hgeLUO6Ks)u$j?$rmhr;pxX!>~OON zIs60W+>V`$zx+mMZ4W#j3L)b9PVMeG`OFw*e0{213fR@)|9Y2G>URBVyDmG=2)ibz z^}iphpGTssH{$O&&^^4YsP~ju*zf-|x(vKBV(J0@69@B#oQ!lkH39?g>W;*&H~D$K zD)4w?Jb`XLm!ZmCAA4!n7>u81VX|gVYhDGw=6|IDt1+`P2Ho7RWlVuJs6ctV)25gQr5lJYkyj6P+F(Uyqcl+ydw zNs4*(4xsYp0+5p4cv0ga7Ay&8YRTqWqNs#!PZVwjN(uU0zq{`2% z-;)7&%aT!d!HFI(P+of00o!wOUhw%CrduzE>klH@tCq^eh(p|-1o*Iwu4>=s zFgs&uD@pAt7|w`(Pzr0bDxlSLx|`jMa&p4ZLgKcDHYr5+gQK&vd+6Z=F>CZauW9MR zpq!SxZ=byT>^%GIcrA0GK_OZqqZ-a&jXo!JE%S?RT7)N~w%KU3(W$rCVk4V~4BP&m z`KefJne71#!CSmMMYgTcxYgfsHjON^y)NSXNzB1j3aZ1lz3ki8s z>xwmo04(q9w7BDtD+v|bl9ltYdeyOQ1?|MB)_dwcdjh#`qm`dmghMb|S)y6@B>q$F zsL8&SSow2#rW!~oAbZ(f0xM9?SC3}dRHS7VdG;&AM{ePqcZoE=YOd2!mBE@>qtOxP 
zCyKk3!@0F`iAKYmg=#eqMh<6X%o-uXhHYCL%JEUOt+%>yn}SFFog2Urb{WN%Xzp~5 zarEv&ca(dU+@P@zDp6ZYbdgiNWo|HbQxXaBGwJ?eOnHkfid9U-vOdq{t5+UJ6yh(Y zXh^{TFt+l(19}xa+S!!2N8bN_(f=5MFB7!zGm; z)p-{X#dEh4PVFJBnrc!5zQ=+Vi9vC+2T?Eo_Ah~liNQjTYOE;^p)mHVReArQcxc)m z)2$AQ#fud=$nY4TTH(x|&r9Jz@*^f4ou4A}w_Av}JHx(amjt>b8~DA+B5WL$TILPx z#c*slBt(6%t?==-*;XG9sYz&>i3<$wyl~5h5H4v8<`y}N)rq^HBKcJbk01t`i~2SJ zS{X}lZl*FPQw&65%iNDbIAJXFTXP;G;ix-^Cd~`DT~mYuQ&eZ@bv;tl7)Qr~Ac&Ug zP;2s1&p&X0_UK&XYfbUcJ$?)4$~h8&JcWV|#(T5iJ>&xd{{n5}A2Yv3ZX1dKuk8A9baJmHW&aS~NBO5g8iz);#f z!&5itUx=GvVh}LVU}j#hc}&Zj*%Ob@z}~vJXl~}>vhg8>jHXG+ib@DZ2H|w-M!hN-{qa1_=go9 zTE34PgP)c^Q`i?TV!*fFPQ*#$Y{Rpio4$wTv*1?VlM&u%&*$){(b}Zdklf7v>CMUZ z>%jR!(B?xKUeJ4gwL3re>igI&)=#EhzSrsdgy|vyE<=Ia`4ueBuUgN~m#BB5=|x*7 z|J}b^$9JD-r*mC9+b&-NJEI*+--q%(=g*jkf}T5c0!FVl3ttgJ0?sU|OPt@*J>LU* zcMS#WYnMGbf$caxZ+MeR2>F}>^LGLjIF5ew)0iAel{n4e*vw|uQ8`tfj4b@AE7rnz_xDHMZ0}=czfq6* zmt}#5nE?40aC3Wbt8WrL|anilCw z*Z(bXa)n69EAZ6B?qXz9-(%>foJ;iGx9jQ>|lYyy1S>s11>-8qvJjPuyp}+SG260Ai z>jjmp#57)Xz6=qnK5g}31?0N{4)>m z6nbwCSiSgy=)O230w;Xqw4&3*6cHWaaU-btFaoeSJ^X z5~YNH#jZt~!qP3()4sl4yus*m>HAyVvA)&Tqk#YcOK+y*ysS)j<9gR=>)xz?%eAFR zy(Uo;s93d4-}Q)WI`;?^1h@0T)6?Wib<(L~KD^8g89?Q_M0jA+6X2jwUg3Y@F2rRc)CNIA(`<2^+;wxYcPn^au*xFI}rZ+e_*^ zTgs>-Hf()%-IwdUNJdp#46BlMdp0T&En0kkYuKK*ji{;_J)PC5=w?`RsM{q5pTusg z35?OOOOHd4vc#h#saEK?npo9Og(DpMIQCw_#xTTh`EbBSbZE8GA#=2F+wzzHjHPzO$(3}1eyFLTW3GnxA-S2NVtZD~WG;s1B zuY1qeEb{a4me|6(Z$9*1OP@0<&7q(TKF_p1(*Y5xq}n-}J0luHwLpT)F9TP--HE(RH-q zIb(bqitWmXUB|ZZ-XZQCU5U{!7ZV{%YYVwcjg72U$Ej$)=?lc7HRxTtijF+d_{je#z8WZ?o0fH` zI@ZYA&?aDF@;hRqt)TYwc4E0;MD@7V7#2>)PM_5fuTwdHka7RJ-xBG?7|Tp5;f)j| z*P-D;gD{YT+_vtiH%X@4c#y}HOi|{%=I{`+XzD?b&wUPE1nJs%>c=Z`mbV^jm8=8m zYhJh`68TuQE5S83b33d%=+K}VKbS^0XxO!7pG4n@(P2RF%qI=rfp9*5^&$zcxZ2iy z!sczC8x2Dq%r?_z_a`2E2zbxHMGrx77cPVeE{=C~CsFC9jGbBy{VUFWp++MUD`%^n zNX^oqeu=KIaWO%+gx8rw$1d1!ur3o)G(`=8%#)0R^}QIjE^!6>HU*GyCSBnJNy1x> zPUq3dpv*oC9#%?Bx3%E@15L-sS%R+lLZGHiZJ*aJL|CgKBc@_*=r&%6 
z71TE8GJBr41mKdzvWweF#1b10T?vrj-QLmc;iy!EgyB@xvmucf@r8jQif8nlyjsKc zx2#JendwabEhv>j(?N0s5dNvS;qXE}H|7uJEEaK?a!+V^WiEJs?^=lg? z6fXJ+&3|Q& zsX*Hk6Nwm%9@lRo@ZhK_9C8Y^xNW%M(FuxzlG+TSrPYQij7F!$pq^ z`6=%yVFS9v_;@{4P0dKiz4nW*DH}Xzd{cQtY}9dStJ!M#C(wr08WpTA3XyYQZKYoU zhu_YqKQ+ey6_e8pw z4UdfsAbuTX3%x+LT#IZz=CI66@jyn7%0u_;mrCL}>v%DPT8}9T#_~CNx%)-0gzKwh zYPvhQbS$wBhzuRM4x2(19qvhS#FDeDx%@PttbgjfIee6S<_ra#IQ>uAQ%l9s5($Ma zTe*{J384Gl?t{{Dj39WETz~;C6;HIk>d-P=<1$hFEENWZP8q+K51BP z+V|RSIRQHr0`F>o-?auAGb{E(A4r#Qr_O41^?P>=ss9Z zLSsg00rQQs{P5XXsFZx+QHvdgfg-x*yE*kcv*7O=P|sqZMTgHFwh z`8^?J_1d@{5#-Sdn_Ld}))TH-C1U=pR9q%Xgi#cZTpan>Qpvpjc80j`(B2!Z>{pEs z#lJjcwUrlCZO=s1H^(2+S20M>5YI_;!?VEm4TQUCgf|g~=t-_X_P0p(HHjSw#HSoW ze`O$d{J13~c0@w<0(B80xdJUFApQ49qR3E;0ZFFCEFinG_g)S-VE8&_heWxfK)dE*`#wI z-c}lJZ@@=!x@JW4k4$RMyZclRWNB#eRP(^-(qF@8F~lbtKG1f9$q;r%;-odJWk1!7 zmoR}PzBhGhrbHhtO0N}#s$+W{kq`>mCVH3^lBBqM8&&mxLdLyz*QyZjhU!uiwMt!b ze_f~67H5qk{`SFzufehyQH9*;Q_$M9)P6)|AKi2OsLf_TK7vwJKF!LWrY2CsbI+AY zqt&XCw@5#wrB=&mgJ7dY!4%o!cx5QcGWk4ur=GY9c2FX9yvAo44zl@R?9Lys!(7Id ziOC(gE+^n7@%o2}vaCR4+a1*Lk<*od2KRAl8wT$xh=CEe7TFA=-fFn@n^HIDEKyq% zE*W3L-C=^-xd{E0L%LTw9wO+1-z*=xgl~9IRnnoXaER$ zRWNc5gc|;F-k33CH5_0w5bpXsEHA@a?vZtCNUP2ZmwGNNf=;jDsL_KDKE^LxZdz^% zd#@XBixb~fF-vf~frlx5FkYB-%f3%ki7C5F80V5!l5-~(><4+aMI^U|P66n&zuTx@ ziMCSh@Qzhe77C7=-Fxn*xoz}-P6`-`EltvHIyyL{8+%Dsm3-83`@tVI z-*W5j3n;77yRZ=6gjz-#B`xnPb}uPu<&Cx04_yPT#~(=!eF2hPaN?xCDdrOOzmkS$ z6K0ML!Mnl;S5)Q%$|$f6T&sp)tJ~^%$c0DeYj*i8;rv1QPBToPn8?)AATB?^QU2TR z%}QrknRG^PB36JC+pTj`JD@6MHb26irU)4?;{TlP4LJmvou9;;1!xo(r=#yJ~qcWxurJpn$n@pFl zJ_r-Wq2e@O6#gt9gO+?F-n&!YvLB8yg&r*o5dTvbf)JRGpi32sqkA#hRv5k-VnC&? 
zAz!m^Bs?!C9nqJg9;<3rA4oxCPCl^%)4VPteKMF8(v`EPg{HAHy@o z#YMe?f2gMj%MQ_e0&FYAt(9h$el^zQ|IuGQg7uKkVt1BTxb0NT6BKJyt7_1yiyVLut$boHf@sH45ds7 zm`HP~^oQ}zUkydHJlbl3cW};yu-Gs5Y3DlVSK?Fi#C7bWe6M}PrPTmsX^itK5S-uc z(DVEX#R6b+jKn7S2sM!qXxbBM*3-D(oQXyww{DiS5l^*uEyCvMU7w zg|`s7pUBf22@L2mqdpiu)K3dCvB2)LU7D{#+hN&3cAsmWB|LX`(}_1e>K{a*zDQ!k zxnG2SUQZ)jj5N<|@xHLGL{eTl)p(Ymev8x!DE_{}*B!&G8qXFSLG;A~S{yyt z?q!X%y!D7*vwRD61 z=z!4z1FAla<#SQm1sM4$$a1*P()R=G$G(P#Qkd*L;&`v@4y*pScp5ez>>zsO;q@tl zhQSs_QO`3&_BnzK>IRx@IaQ|VyY^BJi-7j#u^k- zI$ZWD9)F>c=JF9d$|NYa9Se?U9jRX7K0}vC{9mgqh{h;h@u6z(PNM_s+=aK6$}v^3 z>mS-i8ab&Z(}~H~RY4*jQg3M@aZ^vE*>wmlKm?Z0{I%41Z>c42*v&q|!|R2_+xoOz z0R^)hT+#UF@>8@H>Y0#Eu=MBK4FciU-5iy`CX}PWAL%W^uh8QzjFfTI0TU4uZ2jJo ztRjtug~q_b&`18zM?YL4Bm^qzAJS`uHaK*z+^=;RKSBG!Vu>j<9d1)cw8AR8|HR2w z%QHE|Prw~6!A;ccglP5F++J6fvnJ3NR*JWMF*+PpO)9IK9{ADH!d!r6KU-MevrP02 zhwZ<^gCYH+$_=9mhAyo%U<6`75b2-UeUj2nKEv%FIS=_jKx;KUf}y~yzmjM4^WEYT zaSj{JEeA#8z5F_9z#K}Vhu2NA{gC?9>6%EhonfyEKWB&Ghiwc38LuGLfvcwHm)My` z-%9!ijUQ8ONGS`nk1MeL7!4^iY>77-?5uA4HV5u?OG1`9N|!x>Z)e}?+;{*1tSXs? z*L3ZWBMoyZlED5Ip$P2WFzkk;jjE$~Z!&DC5I59W8V>N{*S@_>Tb$wN1T0nolx=cd zWBumg9CSD*$F6p*kExfim)}jK+nnV?d?Q4|T_XiUez@Oj+#~F;i&%i1Hb+x2z3ew+r3u9^cR*m#&TZmB?cFS|3ZuH;dXGz(D%{^Q1K5} z|I$7nQfbWEN89|+jwIHCIV2MK*18B3{XQ?3eM<`yTQTHU!WsOqDL=SoleZY}GygtH zu*<;6Vo(=T0(r|czHNBjHKdxM*v(#)N{YX!}gucW;L_fwQN;NUvS2Ne`sGeFNjZ-A{}8sN`*#|OHGDUr5dqlG~n{2 ziZe;14hO}Jy)boAH%qJim7L_9b^fFH<_NotiRw`uCMrW9W57gej#opvLy)?(#lN#o z9bEvXpe!8;6AYbIZkSVaQbxWtN&?430JN3|c}4|Vf?cwC3_a$zs5JN|OV(@z%b*^2 z{~B%lUTiMD^k_{ueLWSGd4IDl$-=rC_sm`PkQ%MIaPH_2N}000ED@)Za9wUm%o*Ii zrVkn8S*Q)Qc36mTTT(z(I9vD&JhC6`zu3ahqn0w*5hKTuHM+j%R_YT{A1D>l1B?%? 
z-Jr9b;4)-w*nn_Q zX}2F0!K=gzu47otl6Q%u73fDJgQ^oIVU3H=<>?~K+sV~#ea?i~GaIapG!)rtuErQ= zWcnZ@aJkePGJBimJ-MQ5O8pRHpy^NWwLwKfwLvJ|Orc(LR4XK~J#>>;VLi|fnf~FX z#x#z+fv8!kl)ki4l`k;#Mxc?IeK*ZX%Ak=H-y46zjv9hZV#e;Y95jk6(|3JKEe}ag z$W>rIiTjK7X?R`unQOFr(EpX?j&eBd@mk@Li?TdWEa$V1#aFtcTG+;j-h^BTH=$T5 zW`kQ$pTwKz5|Mz_rBzfOiwk+A%F?1l*tvV?Yr&P3@=h3+(KW{4wW~%{MQ&s(^1-9( zBc_!x>)hHj0~2_Jno>$o%NqtC6yPHMBplW3HoU1kfMYcf8?}!1t48D=kf%(PrO7JSN&B&?k$TOtU&}OfE#%ezVy-o+CO5>sG z72zj$!d_*wzDKe7X43Q;kVO+Fb8y^n`utK-PZ3lC;qao?0+5y1T!r|mB6tz&b7`h{ zD{av_k*2spUVkjg;Q@l6lQbGHeVz7LR-)2qx?q#cmzUaAF&L>a&Q3oCLE9kyYJWB9 zdP}#zgJaw7E*7lQs*k)8^Wafn6h&o?e^9VOL!svh8nj>?{sb}`wpck&=eK;Y9dM-O zFkZT$^uxJM8-n^Uh`&#`N16(wEXMCI2*d`D8HJ5IVDEesEI(i#U-!Mt=y~s~$`sAf zkoas_0B8-tOcX$4O7+h9d}pth2K1U2AdNeb;|a<8D$~+PMtJlbk8?aIyl4 zE%J1nuv*F^yfd20OnE_H+F57P6P4nAH!hmaSlVJiV!9m%Uy?M+bQ?_Jtx21TfLdF@ zT-w)r?nSpiaIe&8Tgky?AP@82-@HH**6p%{c(g~3j^0>_#H=7$?P6i0L6sb0 z_Kg#xBD*0Y4uKA#4nq!M4#A6qG=Z6j^nj$OzFAgMJ-H4c5+_=VJxtT=FuBxW5;M@` zYesJbQzN70c>a~!S1)GjS44l3+ypD7dV2zWg>-RBcA_CfxeO5bKu%lzpm`6pR^kT3 zUPy9&s(1)l41`kU4@<6KDL`!{l0PDWc7P_79OeY|8Pg@B`I<}r^s1zCNi5?YRpCJ@ zbH>K_=CUS2L=Eu`<*20MD3UM=tsqYP5mVNPcsY(UOoG&}hJt1!aAm>ThIyM%;g5#Q zl^+3qX{GZ1bDDi=5T7>kcQjV;hwz?+ycz;Iso??1v2Tr1H+#hPUS=&cYV4 zH^f!MVM`NdVCe_0+jiCoNBXqx1BFLVqD^%ZdMkIf)>6s^Z>Y`X>r3k;Gl++Kv&i5lI0?biB=3 z#Byb(@`?R26A32w^c!or;nS-5>^d=#Sr54x2YiyUQ-mF>eoF$LNu?B!;!cEWTl8u$ zp!GfYoDFmEe(ZHi@)IvUtckzG4@!*Tt=CkZV?tWdGWlIO>`Tc7iM|4pYQP)ql8CuH zf5;UqRpwKpIR7w_>#P71F`_ASqR9oxUK{?D@95XI!xWO$6MMnoKJR*(NBnQ&26u7} zjac6Sscov-qYuRz64KIPDNau4Bcp45dVgK?7i)lU!e6+tHsxm|uvG#RZQh!DwFdvtEp@&`5$w=$kVY@MZA3Abl zA9Sk=5k$6FD_y0PsLV1@PxH$LH8(7-w za*;}|Oc?D6ZIZLZmkrm()tVez4mVDcvMvO{*XhNi4M>7vF9UvTvL#GqOEa>mnFzIB6w^( z6Lrm1ceP&h>nnAlfu7V_Gv{ua3h!9at)PKr^Y^&^yC{jB%c7$dqg$E9V{H-E$1#gj zb>N(khc8d(y2t0#!HH$<>B`B}t*77qx9(=(;w8gG2LqGU#~9F&&3aL~;mJkdn5pik z2Hw)|E2yOUUaiiGa(gEN)0OVejB^pZsG|=1JbxX>=-=r$^v|9%F!Swq_1wpK=7INH zaGz&~@aH8Ey=^*~J-dszOpndmIyohxGhK=A9=koUEj~||QSn~Of!+S2j86wq`xq63 
zy;X0(rxQ-S3)f;opVI9Y)f_I8^5^cClQs&yj$;drnFJpV-i>BA9V@Z@_U`zMxcV(Y zoIa}+#}A%rV@9p-u{r1Sua%U@V=2a14W{P}@GCbTR6ouarKHLVv!}e8sSmTm1|1s0 zdnn38;Eur;ZBHlVZI=(YMcPyDM2scZMsN14RzA5W=pw)v{h2nKJ_n%$o!Mm|ei9|0lU8t_| z@z|`}7S5g24SVd()8QGI%N&VzB0lH6qOy(SO#OXA$7Y)*(s?T8zUPS@;70jB{Pewh z-11_s{p#9!$I{}~b*^tlJwo1LvVoo*mjv4C?;Sw(B0+O>nOze3KW&!##Lz@`wCQLG zZWHJmG4a(Xn+Y@ueClfzu<@fqV{_MXOXQtuOG84!My`=LeZL1v?BNCCVxr{YHyQdA zo(=rn>`Rn;%8BA|@7eKOYDKVlC5z2$MFQ5}2{F9V*$zGA3MFwUMZ+nr;`mfsUAvwz z3cd07{(NmREXdAPU9?-8SYCVnCVWWPxI>k$30b&iIckS9PXyNHJ0<;x*5?SuD-8HiWetKQ;2;=P?HScDBRUa(wdE zmYCT7_)L6GB$sUS)qB2V>&QfX-4y9`c8wbM8V5PvxdwY95PtrQD$gH~Z6sLVDs7W9Mq6HsvVs9~-4tFJwkk3oN6zmQaAEFuia@^}p^pIpH^&TY& zDmyC6|9Vz@M`cXHl=%9>dNJ)fPkQK3&x7WuV6@q2&0+bcpg$nu3K-jhf%+yrrtHWn6aJ-`)>TLX5NbBD*H1bfmq=G`n z{aP3n9)!;A7U2T+55|KKP9(^|F3ek?BXcaHIRuAfvr7+mnytKQ{~~i-PZyD5>FlS| z1co%F87o?}W+MB^8R&OwV%ZVH3WlPv`vMh-Y^9@PU>0V{Au&6{U!dy*V^gdtZOvjL zmn2yL>#H|^!=V+x%`%Yjin2=+mc;*fvqE_mHa!uba92A0z3J=!QklXcr1ff>m>gtV z-0I5l zxxTfO_m}*39edgwj@xZ=f;oXVfl(Y{6ZJb!yYAi^_)~xROGqoe(M}Akd9yBlx{Wf4!dnub=s!uI^**ZvUUsx`%d<^FL{QXGu@SZY`&_H8yWG2H2Fonn>nnG2Uc$ zGTOr6;}T6Uswod`lit<}z?e+6^umC4jyi9pSn1j}_jh)%*#q^$Zo? 
zo7h6$Rkbu0T^DVTXYwjKjX1ta+J3IfM@x|txVP+m57;3pxNdkpB-my{vBdtC3cC%N z`%&}no#-PCGPkrv3fAGsY>g4f>HRjZW;U8Sb{|xLVbqX5_m}KQm6-ln#|W2MYd>@N zcxo@0J%if~P~7OswMciDGMUMq{0TVCr8%7E*)FZt=M+-gwS3f{OJVqB2|LnlxkT^B zfX>iTZqKJNHfzoX&EI+QU}o9h_@`UbJw&`cxq%ipqSyf#E6iJcB-_bXH)~y%OTj3n z1h7yLG?1o95bbxxlEwUSmd#Wu$0Ja+W8pI;Ixf9to>5dG8<{Oo-5{SKKPEpv&q|rn zP>z`fGN(|)d!0L7ZfSQH!T%RD*KCq5fDviz@nZAM(A%xqi zHMgp3`nP>kWre`$fNMJsYI#jM6;2JU$m)G(%ACQPC~IaOU;Ps-i6q-XP&t+VJv&G#qyxVzYLvEd5)tuM8S9@UW+7dgSn~SVd;x`bFVV`({d6lS7E_LM^ zp%Vi_oFz(ej5x#r2JuxqM#E8r-t%P}uy*^z{U+h3Z#zOuW}+c(cE|0vx_9p_(f&&m zy@@PXz;L_{A5tlonw&Yc4;nBwY5gz>?KTgS(rXFM2)E1D>7wncMMDb1@U5IxMAN(N%2z9(w?^2HA7pcW0*+(cu=WlMqX?A%A5L5JSOgp&eQY)l(;m>>ws8w`D?Kn-Sp=+n9f}f=DWf zD=u4TV4VVR79Fsm=3>Io2{iDLgQsp>VhSNWG_=-7H3v9t(y~Lm8IKp&4uSoNPJ~uT zL`3;?*+zO(*r&13&Q}*u;Xp2Mmq(nW;V4oZQRB)0+@juL6rrDIZXuHL-0`eT%ZUab zN=o7J8Eh703}tr@ogYsL`p(ua8(gIv&t9w~ZM=_=f@q1YHEw z;E<~o&V>8o&1UYNcXCJx{tr&hEZ=BDW=P=xvSi&GU5VSbjm)0i@NZGrBg2v-MRIJ&$|0p(35?m^7V0o4~PqKxi4+VO3O{RUxrS_3T>d>@a1ov4Og%QyrcQ%0g^!>-* zcM$*)h>dj)$Z66aPm|~)sJ%Xm*P?GE1TE1xiHJOK-P~|q_!mS_!8S?qJejwy^>Huy z$!*Y$C}Y2Hc1F@ZHdvmx&v}^aCMI5(8D>L9(P7UJdsK6$E!qpvo33PfT5uuxE^Hh? 
z)M7AhzZeQgJO!(pS=P1r>sDcz2#J(m1kf5pCWO#Lzd8lO@6K8VLn4cqB9A!g3tQa- zX)-B8ju#g3S)cCI+9LO0cN~Hru!D!;`7cS|NEKl$J5b0D6S%O;HAg4E7T5v%UMi@~ z%1xAx{ar{tKoK!^M3=Zx0o)N>Oi_YVd!HCVqZh)!pp3cQ=qp4t>j)Y);bUMH(~jK} z9>_1HPLwf6JtaV^={6gi8M^9$nzLeMqq$O%fh#rc|`FN7r3qmLt8Ut2q3cXaw6MQki7 zFXf*(p8zviMcX{Cq&F}NQ}q$)mX;8VC@@n0%%RfJ~8$>HDobfF{^k-38)fPTami4cDvZibh?!F%TCH^bs^c}ET*== zq9s#p9fmv6(sfi@!r4`DTMH;yIqT4jF+@=G2x=HcMH8}b)&!bvU7=G^XP5DotVc}V zR=Se;2<=}1jLv_1TVT4k<1KRGMSoStpu$d(J$3U4rmq?t@Lko@)NzZMj$8y755UFc1IR zN3K1e=!v910n4vpD+R$PA`@CwPRkq|&kEj@G~b$U=E>J;^L8!jS0KZ~yoC3?jeK@L zAfI5Rw|rJQi1NGJZV)kNM0f5J5IKTE2e`!xW2osZ=&&Pp7vKPThv!r&0lX1|u*{_ff}%}53Lx}5gSeo?t@ z3=JfG?|&N4ZiTSDG>y)Fop{Z%lm)nT)py@4{ypt0xL)n8x4qEg3nDXm=~AlJ`j)c0 zeK5+i`w;o6*;YdO-1{stU2nf=O=b`i`3iw+F++>6gd9tvf}O2@A}0mK&k3EXs7Ea zYWBIEQJ~rHbd5^0aug90a@%{sd(I%}RPg&X=xs2jXFJIHdz9*W!~bG%{i%JHkWcq> z0C>&sZ*|jB5fu3J429?Ub(!|B@^>t=C(q}#<>brq;JK&$p+U&U{UXh%E&tl?_2^m@ z2~ufWz&UB&HRf|)DV0gc;Mrt!0E&S~fR*Sct)&vdH_r22O7#j;&spa;UnZ6vQvTN= z-KFjK)=B>B`yQ2L?{ob7hsN*sgfydXYdsTR8DQtW>wOPWz2Hp+-1)3$V6L-aa0q>= zG}6~kngWju0oAo6oAoqt&60<|2tduXiVWcIf_dy^^s){$y&**<38%C*yh2t2^~9Tv zQ+X60SAb(VH=c1_1v^jLiZYIC{)*;Qd*h<NzTxG>Sk1q zsWm^~@^t1ywV1vAcr2wTg@1}4ujz<~^tybf1v*TLP4qP_N#|$5O1znPp9ZzgX)VjB z^I;nPUgH|iNZxdXYguc)Wwq>Ut6O7_LJhgU6!T z%Brl3l8O+uk^~4t6cw>k+pM=Prl)E`Lp#WRM<>UnRwq}7q*pq`F3xm3<0L+n&dtnl zvF$in2JUP-?nI8}P*?NM6I|-9Se&q;AN>|rQ8$XHLb9{e??5e2>F`)kVmGT$1ho1K zUad;5n>GQKSbbw0%iAubvmt%AT{Gej{2$jEOVmuu95Gx%1`EP_(u(S(HAu zvRtgR0L;#AOHF)T%|cG4$#SF*w&*e6_xPulb5kA{my7cxd&o`Be%ASUEKFr%EU+w7 zU!)U=G>(Glk!Ftn)#)1Q8;g`v@$$&kbfxbav~jB3iS->3OMbVG;{pRdla}Bw$8QxfG^`L6pT#3S1ytuRQ5H- zUY#srQP|s8D^U$7jXP(YTdfF%uT-KlKtCeMhP|h!LFgf=E2?aFge>rDyj+5vf}mJ$ zqt#;CH2F(E#Hf#gpXCahauq{%p(LpF{*{w(w@ zuOkdW;SrG_`DNu@uPPCbTLR7z>R$}|vvkBeB;_})cCA`UDQhUMXf--4v!F|%@en{5 z+OY{2jYK?hQ;Y!d%w1m{v@q3NN}Gf`x>Hx`2Ckb{iUKM2iB#eduF=5|_M!SKVyq(nUPVjm zv{m=KQ05onN5T0o^k{Ux1+goEi(w^!TqTnR7?Wl*(X-s-a@eC2KU*sF@BOXq5I%vj zPwd^EAKO&T{DfFcY5}CN3L{y>On+WEq6v)>{(|Kf_DYr}0qz-p%oNYGAaN-}gi9r1 
zXE8I~sT}@5x8I$UuOOM^8B?X3*Ro?G4$AZY0qI_(TKCmwbPpp?HiB4)nh*EKYjJfh zbOiMLA`G*jPTwM$P7wgEXjQ|o zwAcg*T#-c6T#R)(5|&LRg4vQG8HtVQ0+k7QWD#uUa?qml!!#KS8V+wd-dO}%6m)tf z6o?V5p@Rq$y#5uv%bJfE4z0s(V!SBNUD)Z*4ekgT2&!zu{- zM{qY21m4Q3ie$>CVX94dm*3eQYV_5j8Q*vp9po=`2gsoIFhl7YL>6YKR17JES9hu< z);`V6g*`WoiToeuy_fJ?AC>-J(X;mf#mHu+(4Z04+QK3@vZy8%ioxBL@%to!icne! zyds3D(KVQq8ZZ!hCViTN^g1x2n!?i9qQv*8OtTzn>|NMXbp73>$u^9R$xvA8U~zIL zw=C3^<#~FEoyqfPdjJ?>R3=1mj2m_EHYl&ENXyKOe(*vL<5X;aL5ZcZA7mJWKe#C# ztbM}uU@?jx`Qhs7A`sM9fiqj5AW!hS{j*`y=O%S{^$51eezO&8lJ-BGaNZ$US}_0E zqhb5?uWruJRK}l#=-kA2lJOIR%fp^I979P{nb8wrAP36 z;ztKljJaVc1r_6V_jsXDoA*JIIMh%64+6G*V7d~=_%;b)NGXh(T>Pgr>LMBlS#YHlFqow?5@1=dsw z2}oo$UBz7WeO9QOo>QK)&e4Pd0f)Q7w$WEqT3TjRjXa0(9sjSJDv#bvC!k-gL&x2G z85WzbhTn7n=f?*Yf!x3Syg>QK8y*q<>+6<$1azk(rK8 zQ09aAZWHMD8uWE`e58;q&>`ei7C6qwaiWAMj0V*xf1l4K3 zG30k8V>VeyB~#>}xeOe?TWm)=03zg1cOb?HjR$#&ZN=8<`MtH!XxF%X;#^!$_G>9J z+L?gmlyW}<1s!>^;k&=gqEd6s2D?tNk9zdX;2h#r1V2zO$sfrz&dk$%C&tdkO#{$F zh_pYpa^6;+MxfqTBt|P^Eq~LHc5u?&mOlZ?F@4r!0*bB7x`P6CB3Y{4hZ={EBLR_7 zad?nWW2E=qYNdWPak#xkU>7VCcsAB{)&ra)oUCuwj{0R% zo$i<8k|sk&-IkHwaNvpOtahNV=+dvH=X^iHo!6P)6>E(aw#?|b-Cp*l59@Q90i5W% z5Bk*TZwI8ad5qpZ9R@Y6olCt2k6Ux50qrVPov*%iogIGV$_||>M)lea)9Mo`%^MS$ zHIJvFOYk#(OQwR{M-qCDEgawFSmzX(TrPzw)|c~oh8vgFi++}8+xA6E2DP0LKljeF zNa8Lv%hoTREbq7D4XdUX9}OJa=G3ZAYBEl*Y|GYcBdw|q)LgB31=FM6*izzOoCY>n)8NrTeIy%AhC^RqG98w zXgg5JmPFSDGMFRKe06?p4e*;qe9?&9)M&hFds7&)bbzc-NppVEJWtk0+lCiA{0HJ% zO2zU%SE;m6OHV##V3PwoCQlw33BQ`OeP$ z%7b(5p$Ct!t!<%ltVj1TyK}O}1PxJO&B;<%&Y3{@Zt_P4O&DsYf=>1GJ963b z1hj8`qBY#}E0xEBv!n--JIgUaj%aFh6|O#!rO190AS;?H;L6)n&4y|FhEvr~kT%{w z^%bfP$zIU?Me(%9*NNlu5zxXQ#9&eV=RDNN;AQi}5E&=%Hh(PX2j)m@?Oe2z+_K+h$huZS5`myYi?(D_)-jRRI9-b8a{5TV*-D3Vs> z^z5;na}>-x39AxqDg|ZG=S_tdsVVoF#}?o zxUHpfldX^olSA^{p{Hf|ICjLc%{y8_x=WA=f54>iBE5d&ksXIJtrOTVlsBv>c}|6N7FsWpHu zDTp^~TF?hQK1p$}kS`UZlp@5!bt;fGyO?lSMdpHl$6-Lu9#P^%)%XFt+GmxtX`vM# zwODJ(Yn?#u2h*5rXaJ5~U})HM9iWQs{7l_iV4W^H22 zuS&48xMQSq>MZgEEX@)NR7yjumRy%It8M1@o!KgDHfHA~WjGUaDSET#3464DEmm3i 
zl!vb$^N-7nIYv*I7)j})qU zdgkymeNL_CMUlw4qp9mTbi52fCvqP4#VS=Tvb4ZW9{|mCyf-5)+};A-)h%4l)8e@$ zR)Y{0hkzXHrV~NP)qYU$OCxrIZB^?r-y)~RGu+ET$P2VI7J&~r@ z2HNt{IF(ZE9r0A$@~o~qkWwY&Vr0<8+!`DmVkE;sjKgGgH50+E8ORqw8-BV!uiqYj zoiZsTR`0gTDaDsaodT^XdTJVemLj%57b?EsZ-=HNo|1>7S#`Q7=0bI0Ka zx;^1_mlg=D|8Y8YAD$GJDag3T-}KjFB~khnwvlNxdLCZXB99m;awMTh1UHb4fy~M) zpj8cG*oFY^1d%bpFJ9JKzoA(w4cI{jU6HWF9@u7Rv@&Jbu>NUp*0gRSL?ztE8~iQO zA2<|{iH&)iuC+c8CRrfzB0V^97yCa0k+33~RenB5m*nr^^7Y&f1eQ4m?u)BuWlf8qmrUE_EECXrOwr)Cw+xMR%I zDVlzVA?Jxq%85>lslG#z-$*f0AeAC)!2pXofoJ7xC?NWNXfz-}th;vuGGY|$fDj{d zrl3`xhnV%3*)YJdBYXTRmb%X~)>Fo8sqbTSqz*Og#-bX7fwIxEF#pUd(|-d+VsuT4 zQ;XR7A#-lrK>UG9nhD;e&j~NnAEhOxm4q$C#fdj!Y#1&Q&bH?Q$uDBG!;PsL+IX9b z^F4%QvU)JadH=~w>6o<9l=GqW`{d+z*sBw(FKZITkR&8c=Q2@fn#@-m*f(GupB$|F zN6wHj8pXB0Z!bj5r1vWn@_%_WkRm*knjWhUhtA(aYx^f>@kxjcoJhx-?Uxo;0u)WP zi8@HjS~VLac(sqz1Qn-nKq0Et3%#+eSD%$Gi9e}qrwP>Wlxrm8$44oemGDI@I zlB8x`fDpa};Z$IC`tV!WM08L4Vu=QB3ja(3uNN;*{kS35;i0Ov3R2%UQ!ywT*JK-B zI*1=>)!So2EGl!Zje)TSAj%hf7hU#SNC$U^0*_loDphO;?xO?4$0R>dkn?WEO5lkk z#AIY#Tsz1ec6rPj=*XJjJ_}AD{sRG5Wh3~Brf|qVf|_S%o=bz~{NSjTO3P$N+a{rv za@s%MW`c%=kZSqK$nnQR+a|KATLo< zFyu-VO(K$W9OnD);RAP^qm=apK4FECV5p1_Zs$KhiIIFKFgwkgRI#-HD0VMiSXsRb z(A5okY6t4}g_yH?cC`)|zdXT$YM^$gvT95NuBUNqwldoZ5KOk7I4uRNzs*#r*Gk42 zlS6E35EaXTW^nfBo2nRyAXkiJPDrpC@ZI1sam(y;&cNK090ow+7zNwlGd4k{s>VYi zjxgU<3`|KFOwsn##C36X1r83>{+cBEa@J$^i|`!pH`!Y!J3zks9NC}Z$7vvQ*vm9S zOL={yYHbiRm7znZ^xj^UDpmo_(&HKk%Nc5Z&C+$tt9T3gwIH4wUWN{q3amKlkMa%y z$l51imT8+Jyn~CLvuzS})&ZI;7M~e<=3N@|ug&W97(5P4Ph!CCJ64!ng57?CHmMF9 zM%;o=z^z!AJ64x$GMgetu9vp@tM?nSfW$0|u2C&DHWu`4;j^F!NE|I$4N{ z)FBJ_d+B~!(MX(2=gw{$=irkd+C`RFPFEn7Es9VuHWI=SX=iLsxyU|;PH1mSxf0!& zQRu*7CyC8ThkqhO9a8Qx0@drEh?<*1uJ_VTWWL4{UcWue6cY&cg~L4>zYT8DPTKOO zhbFT$<6LLkH;ipls=)P`r{7)algIilYPV z21TS2m~;xdvT2z#u;B{KMjto)!?57+a*ABqOa~aeL*#U-Z z@BQS?d_kPTk_$i7X2>nx$iDmNnBNZ6Mt2of}_ zA<|o_h0?R4K*s$$A((57#L*p9Zc}-FEfKxrZj8uhq;d(I@WDy8@BMlR|FOs&um$bH z^!%IPbfz{$9${2LLewPsz^=E&?um%8djGp6N*f*aTkMugII(iJ9fnP4E7G`6tJYS! 
zFwVp5dU5mx_?E4OH~?9bMVzOejp8c!eJ%6W!LUWlKm3K+169gEmnUxS<$HVQH#S>h zM>2=u%9j~dY2#GV2H={N45zqHO$2~h)ea2rJAly2M2t|jBi(Ed7yqiW>&pju%dfH- zDY>h&AkZT;!}gbTA+Pc%vy7AefOiK_>jd2CvIwoqODMW#yliTuzMU6b?PX1-?OSt| z6I!%ed?@W;Vu{+~E_}MfDR4(Cm5zCZ2nx*?S`dog^EVEi&) zZ=Rc(mI=!7x^&jwEGNzv%ot98@B6QPTKP33JYGo|E&^OHgBQ_xYy=qdcdy?o>O=&X z{>fif4qLF`Gz>$;OSjo5?g(F|_|Q9B0o|y~hMfR^u+;f8+iVJ4H5dQ-!8$=^Uy<(2 z$bf+G9*6{WR`OwfhV0{tT_QdGJdHfGr=%A^S2@p8nD2j`#VsY7D*1-U(QdU2ib1FKhb=(usvmW zXp6^@lYHL=G%*Ubyb{1ql8*bPn7-mLi*%NsWte5UGU2LbdEXfG9E3VcuT&w>(u4Bb z;>vJTQW*+p4!G{&ywrJK5-g#rXx({s2nv!VGlbc1gKkp@vPc58zzKz;Q&LlAzLOL<)jA@aJWPtGN72mS_A>D+1;NS7!1rP@>{`&{2D! zMMhmmR(5AFvEWct!(AieRSOegvJ!>gf#Sl6wHGy4kZ@brMbK^;fDW!W?qY^_o-^NA zqk9T%u+P9+c}RSMKeD0|1^?wIFb|H~qY!gnK;LV(B|qs&AM`j4&X{LC=nZ-f?5_Z#?STEi2WG$EddA?!*}{9LbnQ0<*i3Xxe!Igzfi-*ISNCe2cO+=` zHrpgSd;hmI1HZ)Vy`@X@s z<5IX|Q0ffc{4`YYic0uQQx}lHG7e8MimJRxE5eR7bV)}Ci==P$7p7X8Ma%7Br!jbW zqPPP?b?5S19fO}SfCc4rY33p1oqNTTS*y^OJox^>9(c(O;`#w`-R|4xds#KmIQR)z zalLyN?fm}laGkdbgp%S4s(0)QW0-py;?Rv4d?hjz0X#kXY#2~L9&`YCGdQsTdH}JX z0Zv6&b9K}5St|+^WwYK}LG!9cm7?%PR7BVVH$J(Xsqk6b;lIwE=5JriE9V!kwU5n* z>5i9UY5n3->q~3bF?I9osCdc4dH;6&@NK+I+0NiiL;R#&=JPt8-u7u<wA6ah2* zhxZjOut%M$X+P4^A8n<;kIc`N>|}3G_6h<-uC)PcTU08>fd zDAG3pKXe>Gm=NS?&?f3EMlTSEA5JeQK3HeqU5!#FRxjWI4**Ymq8r`fBj{tko99gQAh}1**@wT0N`2h<-1;)IZ97U?kfjash}cpOEWsf| ziW}mJZ*~bJ!J&h49TGimggBumg7k_hv8AjCFG_r*2Y6l7^D%GGm?AjTsKlof!6!S; z=b&4RQBxpZkuR%W?5Lh=vO6E3w9vaZWU^+OUiW0bV05yXdx~SPE*Z5dcLaxmA0T&v zUCD@%-OeCi;iI<<_r~WGg)R;8u1G+oPp+y`@Vbl$U)i%2g}Y{SGH>$yca`DKmu|N& zA72m4?Un7^a-Zo!HxdgUFeQ2SX9nBlPuz$&dyQJvWx}84?TT#NY2e_Y(xnATl%lS2V@u*?Z&Tl4BzxtBd-iqnaNgg zbfI^mlRf_RImJ=0CP((@KD{0hcj*{+AHbu$nz8ox*VbH}SSmY#_Bh96mO)v4QBz(y zrxo(Pz@jQeJ6A{+lOa0xg7}tqQ3U~~byC3^ylbq6)n!kEUc+NV{4c>hSLt)}GWXk( zX*QlhEajS^m~=&K%6u;DNqHV`Hdux$toQT#)qOJKX(%g+t~{RD?NvX|C|__XUl?}} zr{hSM$a6^z4;RowD{L@gH!W9_FDDH?=m#2_H*rXx@p8CFwh877HOSi3P*|uOwkna! 
zwjNgdcDbiG375Rhd^mJ8bAOg>t1vS6m`We^kA0Fe;Iva*B+1uIxK1jU1BBtOv8^pK zStc2dTy*|+yD$6c>TZTTW}W_P{OGeB|2v==E3+=Ia_GqNAYQnyHxz*l)uSgl5oq4i zRAt-sPuJ1XWm5Z>1{%v$8FF7zTJy1$%2dvWz2n`|Q5_dT_7F}MdG^3%ttBk|H&Lm0 z0d=DM9L|H)T|v&{b|G&6bUbDvIJ;{7K-jALfQXVp)}?{c@YI=F|GE(htLwJT)@^nE z^bOVu$MGS-yOVzYI`WA1w8`r7vasR2)Cfp*EoS7u6l~*vw^mt=! z`{U}J<*Tfz)xHz*Wcj*vIo;?U`}rVo;`x3~g*0{Y3Nz5{USS=^t!+=Zxu==VN$5Q_ zE0UW&)jEGH)uO0cjfU#&e%Z>SOcv|;{OWZ~&qtcFvWx)+Cwr4=X0E%Hq39jUqeD2s z=UPZ(EF3(6VdH$ipN^%uqR#%qWcdhKDda*hH*#HiRjGMA!u%YaoztOrRZ};?kdGwD z_cr0_(!ZAx_n7pN9-kVQhtaXEX;}V-|cEVGq&cn>N z?tVWm*E0i4MVn&F&Gs4Zx;g!a4(k1Ksj1!QQG34CdV*JVVccJQA9aSRXJ(Y{CcC+A z@~(Z}`mDU_=6+IkZD>QW#)OCN+-J7=I#pu_w<1L7E6{cLYmbIX-GNV`wIr|Q-{;W$ zOpDF;LEG1gs0(%GM5_xXwzhoBR6Wi9NR6uz%T2V}lAJ5ganSd5ld0qC54*F!uixw> zaYHC`)RS)(udNIr*-1adq0tS_NY{4E3BaDUU*SJCJm46;PrzOJdz0S3htht)qgdOV zGpAiLxu%}-z8H02Z-07i4xenZ9Us?3a!nsGp0Uowqk9O+oUH|Aj&~wVN#9~uqn_Zc zDJ&#*IDc62bzhd>M04FAIbLYaQ;En{6gtW7hWw0Azo6?t4s!QxiFN$&?eD+hVzazd z*@q%W}hBf<%FN1?b*m%=VTubu`~lc z?s2aDz43R>uSY%gXdU8hKYa6ZK22Rmviid=t>)QMz2-?+heT%V+E5ioqQ$43zVF7e zPFDiyJOennR_p zD8ZccP*y0}&TBxQFRhpi83bt@H;`#)a1)FfOS`h6FD;D`Yl>r;_yqpXA*RoOjJ(P( zg*N$1q2d2uLrjz3-UD4{Cvz*u|1XF3R`s?*I>G39@T{jJMWf{C(D6rjbTgb}Q#vd@@Qc zmzGbe4k;*u)kzgC^dQbBJ`!73N)XB6MVUilozk1^qTsM?Edb3(Z>qSo*UK$p9KTjc zxKmS#-7x0CP#&?;sg=%bqS63Cy@HU=jmU5pAEJj5CB$HnR@YAItLxv0vL{W6IEA%K zDy+Bf&_9ph9F{VRu*l3XVF<&_PRO=cR4SoikzIC{)4{vS_eC z#WYch2CfV{%FgwdvM>ZErsJ$QJ8sx&2Lh~8N){q6Des+PCbTDsk8{?UqdyBP7ZyQb zskjM-pZj-om9?XmH2}nVj!?-P`8?RPxE*B!_HB6l4~Z>gw3EZmu9umKv^befO5uL+n&}`J3A+uM~@#N--WE z{N0B*khIj_tcL1&e@-F624wMwMnYIVBXdg)52?%WqFrZj?a+>JJ zA|h_1FH(fPPm6dTbtocigJ1U-@&k}=NYX2;-d$ic5QjwUP@XgOs z{sr>W0D%a?213~oHw(+z&nV5#Ca=HEA8I7lfi~l)*y3;(j=#+hkQ^OtO)l&&5C{a^ zRNr-f-BlJqUyxLt&7kKw$KNy<2VvaFA-Y*B1Uh*FHjpxr{wTb#QZbP2nU@HGRdv)5 z7qw1xG(9xsg9>0}zgz_O%0Catc@zq-y#b4WzO=3?euwI8Ml+~_??>EIDNXjqW&DR7 z@C_P$c;+V({?6{S7Y=>u%e^#r?5TGNe20L{Y>;BpjAow*aJ2s%IUK~u)X$-X8;f^i zP0U>&nrPud#ERA$@Kv+SIW=SYash5B>*#2L5cq9IXZv&~QJ*xpQ~^OKk0D;$AU8uJ 
z6wFgJkEst*7A_k+Y}!Km%){ZG+6KYvQc-LLXCb5TfdpWRRM&=;3E08dc{Xb=J9E66 z6rZxdRL2OlAT}xjvc$kW?Zzqyv}gu(;<;#Ia|4;6PV!I@E2gT=s_8rUW_)J?E!r?K zjYslL_Sm(&KG_!ybg2AQac(OtPXYiCn((;+5N;j+%9e&grASOGZ&I4_tOzw$_DTZK z{k=pj%dzpt1Fg-O#@$zj)6vPOrn|ifAO;e{oGyY;jJ0#G_VlL#3as?EZAGLb$!*INeqgRCUtN3*N23G0W0uY&1EJ83dwQ-|-Vc3V8% zNJ5MWIlx~~-$vTTfX)Y&BN>>8(7K)psgSl+BojcjLZ!ekV3DhB?W< zDeymJ{3(cCKtjZ;>@p;Ke)c>Z-%A|tkcdbhRm3jXJp`gN`v$&r^#`4pj!wEVQhawK z&!h%$ejpHRQO%sCq-w3f&%2lkx}+Z(qwkmn2R*QHl!%@b1O`OTP6*i~J*y+j znJTS_u&c`=J{@9K>-LuK^q(z=ADj)bBB~RUfJ1}7=B46u94sIl?yrT)kDBYUx4YnPHj|;$d9a-}-$367Dz0@?u$ynzp2NkA)|M=UW$)VwLDPv3 zLsQ+e_O^p`r#1mAOh=kDR(jeC#Ey zxm2G9Yq{4}T;5jeT?h9!wcgG~Q>Lx2J3M?}jdU_WIzLZ=1`(_)9aJ7=aCA@JC!0Rj z#BVMmP~o-BJfF5#XH%QmFJQ1YU#Hc(WN=vvqLe!yi}nxAH$$LJ=-1*|j}?K7+|ctf zk1$Xk$s4&Q`++MLVK_g_h&VGYFn8sbjM6wWuMe+$Mjo4}rL#@>8gP`d)?s)X-}mv! zKi{7oJ9wNsmAvc>a|^YkdQ+BfOshNj#ZtnGFjjg8$fn){LQ#*4$X;Q3U>zx}dzyL) z21c)(eW)7gMc5ITk|hTYS>v*m_8j?T4&17EqPt+IrY~YYy&!KNwDcm=CAw{oL7IA5!+Y9{7dL{a1=RKryiUPV3fLY&n3#qOn`&xr%;u0Zp zpN5W^9@A|1$jphn%w%>+qcKT&<$nIhne4o@EduJVO7OX(}G;?Iq%`6fKCF3d9_M674of z?bm$<dlBYfVs(2AUbgNjZ-{Q(_HLSzpz@d{dL2 zHyu4czl)LvO}AnD2(pJW4gH^8o=S#O+j`r?(fU#4VXHAEk>Q(0xt6BVv{J=eyc7dl z0?v%7YfG|=$0gj1P`*Z`ixr*nefs6U5mL(;RZ_;G`j)$_QdS*`tGh$0WQF3wgq`e> zGE-L*1IpSU?hPiSrB1l4#Pszr>3MB(gSt2@|6zf0_Js1NCn?m5B>{V@qJ$k5<+4L8 zL+uCg4T1caE2MM{8r{p)^d1|}{ImxyQrV>5OL%7)6i0XHBge{E46HH!DfAFAbJu2W z`Ld}C4IgSz24Cywkj>_CrS1)k+L%!e(g1%vwWisI!kJvdeZ~z@;Z*$S<6(3%#0EX zQ_4-nGQB=ya#rAVi$M9-U(Zw??u6{gXeof-fBCkrX5GoeCj4w<@ka76f;DDao9uEN zxikb7^G;5Uj0>4)CwA^Np~+}!YRt_i-{M{wX->|iOtRgk<4g4+2E(s?__#{Es1wyd z&zRoU^7p{4clMxG)-2S2uwF~wbtOk)c(190d(DDF@o{y*N*H)QiM)d~4o$|VBWsau zQ)4mAvujHy?}gQ7q}a||$I>h6GJ|pnd)4)*mPT=H9L-hG;|PSY^LtCBqNOl=8Nvl0 zhm5nKIJ{h3s+ogo+>LXWw3*S4`t6iU9&ac*vi<=Xa59(cRFvNjofO*LTrOqS*W8?iz4}||LTv&1&h2T;uOQM zWLxJ%b4kP#zcYs~6K$#k;I_T6^#!lE>Z5ezTFFp`>=KD%#}k$zmimrJ2^?c8>!=b@ zll%t!hS{Sv^W)!vVsR5OwnR{JIv8sstl3i)mZ73hs|(!4l_^aS$!AN%erM(wFNMiRF<;xyIK8Bs 
zBqL4H@P0kxRfg4|Wi=}gid!(6FLpjHFR?ifya%x~gkW1h6jwIH#(~s5cN8~g$mB-^ z1aQo&Bl>{_luSYPEXdLFqR#eRirmq=Y_mqUvJ*-Omv6XOSlrrYTKMWWc z#vD?_3SAd!AQ0Ntp8Z8FRWq)q1v<}YAip09oo<+8ZqEpE#%MbFWVdMT>5jMxeV@Qz zh!r^5B(N_u(ZNg%Jy3n_ArJ@<<_{ea}sq#A3eP&*MRK#GVX&mRP%#Im%mKP;j#6ldMVSP?P|HRCw z99m9#>z#pmied?Kqu?+GL*>Y%0JMQ|27pEYyX;Se;sc4kI_G0w(Z9k<{XiDzn9`S~ zcp{aKwrIutRMWMP+D#``QyCqXEG$X4_(=~Q(8j4X78zz_FSbW z&bEY}-Ak>Bhh)+Y*OO%Hv3P?k)nO56-YD@_OB?sY zIKM_k*vDEc3S*I$#+GS-v@tnYg6pnibz6gs5NsP3(Fy_hsKy?%WzgPPXQQ#RuFwBL{qLfpc!UE{DhX%$B#E#WpJj zSf-ra*66OXXS$w*FsDldBxitK2bRhusbpOLN9jwB+!7N7NP9A9P<^r^VSNCUP%ZHT zA^(f%m>gxV4IE}hd9_hs->TJ>C45UZAMg35)a)6`@AZ=H>00hz(_fqTCHj#LQptUd zYFxi1kk5bpU_=GA>p33sdv|}x$jU;=A0a^0MNeA?g@EX#074p#{puj$uQTy<1n~P1 z)yuALP)>#+0_|bU?eyFT`fddR^bzT~Rrkiq{^bD}&<}e?7>d*U6)& z#rCV`U1JOEy}?43d)tU*oeWOLIOic;xSLspKekv2)4EM(MCi-V{lLMr-Y#aZGsjZ(!9{14NaL4kA%{gWaf4oJfR0^N&Sgh{ZkDqm4+uONL@S$+sM*1|nL&hqpKO{1hj&nu9Q z5nNr`yDTLY)xRvCa-LvRDclkxI@rQtSgjXH(AQ27Zv!#3;UeTAyvz`^zSe%eD^P*Z zg56XjTyoD4AGnN7ShT>21C=U^$)BrHX<5?Ko6 zk7GYLK<@lKQ?JS@u$^wBs59Plu8{L~MJf->_zi^=R$+p z{+*a`bH%OQ$*8XR4hT(-n*MB8_|I?964AM|Ym^%!@e>Y9KDgBx&&!kiI5#xO{sLD4 z<}vpb2;6JaF`~yVIKp>)pyX$f6t5v4hb8y7Y}M9>d4c^cw?D`}hT78|I2>LKEZ?XB ztVB^v40pIecO@q1rpVv8NY?DGiZkT>&pJVO-ry%6DBn8l%QX=j$ltst7kT|VH(uUK zdu&TjpEyFf5OZuJgVaJh>&V`X{if1Q1n(+XHm&@hV8}j^GsRq?EpVPZotgDEXoFsP zYo3~tH`OvvAD*Y|Q1<_-p}@*bN~G+$AbPS@GJl;PcnYsz@eFYHf7apF#@f2Nu^wzB z6#i76oZ#W#SatGYqqyc}SgdMq=R1eg2RnLo47huP7$F+rMMuPL7?noEnrD;dWHVHY ze=>{=$BGV$s_~G5^|AvD2g=*KX5Y!s*Pyui%kQ~{43afXGa>t=5`ox%1rZ%yhq2GX z-^aoSATj-roke4mT$o1g1H>*gup zrLy-UAs`>dcfq8nAerdLj1y9}Ivf}y<%x!8H$B2><@@@YaO3}v#?2Sj(>`dQH+V;q z2Ykp|n<7X*44K#+E=br;uCuT39srdH)vlwz^b+NzvXQ#18s){6wy-en7yY0)n|(9L ziZtC~zh{&bIi>syYwiRJA!H(jqvFF$iOUa!pjV|D7L1!>WYo*?Z>{;t#{fiC&rGCZ zFn~zM_aMx*PDV`5%MaG=2efe}crCZ|QeRmlEYY|hF-mZ(_dF!p6y3ZZDI&Bq z1p-x4tUuOd*xq=904H#d7LK@qU=@c@KG=spk6WY&v+2@`0Rd`I2U5+ELMJ?k)>QV& z2%k=vn*m0U)@%0*ELdZ5Ak}fi4buezml7!HF?R#yPKVm+Mu&&W@#9)-HC{Yx;2`Rb 
zfL5q_!G!WTFG`_{cPEi>4uIR7B)i;wY1Fa@Rr8?k1}&jy$=Zvx%w1fRyqaK#{^;Dn zaEeWNC$Ed10jsQuwOv{ru$_ zT^`T|03~JUQv}}Z!9t3~w~U?K5+a!LslbeFjR83|2qq;W$Y9;H5^-&tvcpq#PVL9b zZ@a@O`v>lL&>?@vbDVe#0NjwS&1j{@x1|x{V}#}sNSjN~W~!I;S2XS?DqJ-GQC{TK zQZ0#o((ha^Hx;+!*JGzp@|O{MV5Y2#te>qMJMi!*ofC1J3UtL z7zyES)hk^#VX_pTDVUxY2TAsCzv4qa4&n~msn9L(;iCE%ISaWV)oH^-POs_mO5Y8a zQU%ECMqF)O^ch~zAe^~`SU7Iw+Lz!{+}0dFz+QeM8eAZYd5*uarv6&2!9IrCGqk!% zKco5JV50_}52yCB@K7VDwvqaCaSf%#?F!AY~jx}>tcivaf{Xh%MWoZQJtTvQrxjf zGY=u*NU=-on!-b|3!X#q^4(Hg3>_-i`fIct?JG>2mGV;qkv8e4sOlvwC(w~W-S8-U z^{Gc)xQ?vw`flRKV0DDg<9_aERnUyHha!B=3JS2xl}+JGdWnvrqpI~nb-Wj5)?)Wd z8HQi+v`Y+R2WtFo9)MIrMZCbOOLX+spc(#rD#nxuBm7-R+I$4$Uu^w$7y|DB-fhyt zC{S0DkiPNn@ zVMmrjGrRO#{U5+)wWnx&s;@Ij3%vJDBwLU_?oypmS>2mi%izI6&8=vdY}Ht5M9>Q1 z#!K3|>N@@3`)CMvgel>=!)FcD*GWC^nEu}Bu#caJ9IdHAsbdE}{-&yVFRCT4EbZ>G z{k~oDodUU3DBlulpCP=e6rXISVTDrvoIW5?bz&+Xm|QVx_#xjY47zwh4X2|Qhh$r`f=liMeJZTwV2bm zyhQSmf4;=qVO`-Me}j2;)vN@s?2h6QJVoaKXOo5VIoAx)k$-QxGy158_mF?eJ~uVo z9Pev%Kzky8o35j>t2$Yz4bppm+%9AbR*stu1dYRG@PGNzIP>_1N3iWzdog((EYnGH-{MK*v~d2ioL`7qfYPjxzPE;3s{=oDUa);h+w`+_6%TPh)t8YQp1 z`B}+H=S7?*D!)cU73_wR)p`yy>!68%=%DdLH^0c~)P^JGA}Usu)8A+PO4E_BbY4RtiN~>uh+8jt_Yx`~B?}DFH6!rVegK0O3xF#W zl!!IwkteqEHVB-8l?eiw!a_9g3$MyNmV?4rjQ&Xs3Bm-oy=@PQH%=C?BeB`qY(@oE zNpxb=@^1#oOCeTLMI(U`kdk8-z)S)RW>!L~2o2gb_D-r=G{`Uy9+OaP&(*+8mLXp> zoDb?z_B(~FtsR`jg`vjU0}*4YorvK#3=Z%TuNLwxQfi z3gf0ZxV{VOk(S?&*uC7qA}9fvq1kN)c=57K5(@UKC_uxmEK8_n!wH;}{^QOp(2}Q# z_m4(E^u5rCzl<#^O;3Z&f|Mc)X-UBCCO=z6j`_T1UpnXmeclZI+7{dcSo(TILpWuE zKj=|^)*9Xkf)YI{;i?7_)R=Mv>Cc_Y{RWW{7Uv~EC^fNcA@MOW-XI+cvFw~E^tJ>y zDgoE5F}Rk)uEC$IKKU<#Kxo5>ka}Z#vo#r=9dXaB?EQP@&;hlLH26?uf;%gLy9qLa z5&cUkpa7Zy17ZmmWj<7IBdIemDOieIWd-QU*eH!aqo@^8uF*8*4pMx@EKS9rc9Bg} zdQcO(RX1OX_6u59R_Cls0|Fi)YjE!%GV z4bd~zczmDxvFEwi9#1D+HD)*0)*tr^wO>zHH_4p19y%DDSuk`xpM&+yskt}fmoYpW zZ%a%=M{{JF)3!ewxyQQga*ju;bBAm{IacAHhiywQH(!^YH{gGMqG4=#(h7AUsNkqu zyY9Y+u5UU$E39wl8(v0ynRPxt+gEd9OL^uLlc8LByc#^;7tpvPr(%12U+NtV;d0*$ 
zzI!VyN4gF=MH+nnp}(2#Sn9rB-I;py;K^z$)QSi9j@J4v@7P;!Jhe=C{(RQIdw%aG zqrvj(oTE%TG~(gTi#tBlJEmim`o2Zd@jT648nQvwW_h2q8ScEU_)jHQYMaK=X5-D2 z?n!a$1~NqerKsbH+Heg}A0X#QN(DZ2oj%6ggq{E`SOuGgnBPvXM%jj#W;7cjeWsgq zyOAYEu`$8&9HR`+wZi7S(0jsUQe~xI_yty-_yS_1D0(PMLV$hE}xcS>Ze$i(79-^&f6&_>oN4@2M z$qml{y3l{4>v0{J0&J+k#sYz&k9e-mFvHdB5b{CbODO_5(Q|F@(Kj0Itp6?uYXkRv z*MeyK&k{4KK-0Bx_MF6Vju)g0%^40?CqMh912%+@h#mLO>FquP#9{Z8360}OPgaH< z)aSv*irC}{VYlz&zghhH+e`xxqLOXr?8innv`lVi#pMJydxLWqNU@zc+Wh`($`msv zk;+rhrPN9aZNc2qCymeZ3Kfg#bkoYlRF?$x!OA34#vx&urdT5fvpuHBEV@KVI%N|pF>J&-Qzj12 z;i;29v;M^{WZv?=&k!ZoDxZOx5X)&?Aj%_m*AJtHqJNNRDwa5$4gr-hcbpD1bRoYo+5+|mYH3?nJ8_G?wMG@M?2J8A`939Z5^X6!&c5a5E z62@+JKY;&9HPy1(=R^Df00{amyeIs>CTLbB#{VfmHdg-SnvBtXYh5int}m7|qh{Rm znd~tvYp);)&9w@y z(%#Xj#F!v?G1D432!^Imhsqa&AO%#FpQrC7-;r5pBOnPZWF9S`qRmRCltfdoGsOZcJ6=hYX8S5+?#m^^Y6g#Waaa3BgL)#=T zD2k}5955A&s91`GRi#xg>i#OXcTU7#KQfarH$+$nMy*DIU|op?R?MjeTY@l&W?ny8 zMAsHem^<>rgFawF1LMmQ1Hu5&!6equi*xD6YrsM>V}-G_vxqIC8#UUMGOV8uxv_qf zzt0N~;18=UGNZC?jA2x_WgApNhOZ4tu{$rTi@ec2qd{tT^cnVp+eN) z!_;F}S`&CJu<2(-Z~+bIf_olgBNs!c$7!8!A9DW~;+lQ2=YO$Z+#fZkO(;o~d(%|D z)!Muuv?8u5#hb6Ngt9TN=ihqbbWXuAuJbO34a8<%w|W44Vs1X99P?fW-X^XS4^!|6 z;*H?uL(J%_E>7RGYZk9mVlSZ4B+pzN*#N>?P2(_JI_FN+5vb-&|QqapUuUaqXnN6!I$O>)47I3~ zjm6@1*_*?5FfaFAeRKd9%nugSCVBmMP<>|G=z?#JFiyKE8q z9Bn+j`bs05)wm2a=$d$ZJMsA}>Du2Rv-u!d_5FQ6o@N1cS?Vd`c^y@ICqE-#@pE!N z+s>WyHOK0GTM697ct5T3V^&p%@7u0-y$@2eOQ@+-mKJLv2F5`87uDOReci=XpEBI+KO- z_2TpX<#IEqR;uUezMkGY)drvC-nB38A@f${b^nq)y{(pd)gdFj@9beJYP^4&-uZ?l z^VN;F#Is=6D~FQU^3x(1DSPdI&f_7^<29nGq%3-Pr@`bwGxSbh#j1x{INiy<;)s^O6%OD~l<3F(N5 z;4O{t7lO1nKm_3kBj$)hzA^$k4?#HBYg6_hJdBXl&&_au8)w5gS z6`Hp#E`wCVOH)luh-!;-DW935ym{=xi8tdqIygq6aBAoJj;iwZ%mj2oHYm~bjIc~A z4zV)+MCla$V`4{t#+t^;tf#u|4JxvD!hshSwX+i?`CRgz@obUKCT-9RsrC@(TJgC3 z>er7%*175>AT_=4^6W?NKY_G>6;;m(1OUMJACO}FS0J@B`(IzEjEamkmI!k0hQ^lF zn9!r@jMFZXOsmo;H&~u6%0`hTG3v zye9XeJy|e(`!`;<#CY>@!yp(U)4H>D8K!0)HUj1CowRN^zzuPP%vzHV)tMKhn}Y=G z4g-!&L9FqW|QiV-c-y@<=w>wHuVxkzpB!T0gri}h7` 
z{?1mZ&Qk%(^}ipnB$%?0bv$`(*Fg!jW`A*X|HQ;ZewMvJ)OezbiEDvjnR;kNXQ4%#qpKGwzF(?`{PK-kWPf+4?t&*)t@cE@{P*xx0K#hS}#ixJBk3_!< zHIBrF7SG0p6~AH*gekuKLE#f8O7YECNS1X_xW zHm6P@aCm|K5x}PZVxB=Kijt;^fRTgHiq~LBoBxYuk=hM1uLiNG8m^gLqHzTS zgB=wQmjj6gT28xaF|O_3DH0?bHNEsc1V;1ze2AFCQJPk-Xm}+KLD%S_Qq5$o^Dn!o z)i(SuBXK%dl$v^~b!C{khbjpEQMEaM;RkTwZRk(yn?GwOPeU)PIG`~<3_k$!{WL`` zsJK6qe-q*y@T>@?!>izMi0Nap?-!BZRFeCNrf06uL|$4{nO6f?x%~Jot?{&O~gC<{FGqcy{BOyC}F;O zINX{1{GF@JuM9!B*Y|wmH(a8}kgEvD7EW^gPU%uxdq|=W+G)y-;}!EBNq&&HrHR?m zDOJl&jFuG+GpC(0YRuUwnq>-FDy7tDAK}UF<<92lY0SU3t+xtz`P!IYe-CbMYR=_* zl{B*>?HKADS@Oo=jJ@>CwPwKy>%)9^pDwcOmx; zJ4Sq}wu3j+;M$ic;ZHp{?z2vQe;7~oMehjysYL~E2tWUa@?j*3E`Iz!HR>N);r=(I zvN3TsFtRcJ|2B3NB^mQS0?4`F5}U)#JEVq)p6Z2wE$Ml!3Q}I{&8;MgOYy9MX{1>@ z8^&4Jw&^=HHSP$KpmM3WUx1)Asu1o7c9E!lmFES7hb8%h?v3t9!Jba0T_dgsM?OzJ zcDvmhJ8X=(QULwduyAFwrPM;e0M+`lwQ z0-yp-hvU_iRA5>GEu1x&x+DCV<__{`QY`BH@{I`QfgsXnC5 zP))&iOE4RwTjC;g!Ym4OZ7B^MQlm0(7>vUQxM1L|c`%|_p=pQb`srnF0Q>5;BIuw6 zrnmrmx*s zaYoHE3jC;qBqiBLnM^|t`=KyE1r7M3T>v&+jGwk`%Sjox3m$-m> z>B#a*J!VCBhWN22LCi3k2S8wy>u+X~7IGx+vsg>1DxlQY7heh&EK`~*x>T4>$%FA~ zr4D zQ>BN-5UYdkG;~L>jUtC1rw5x6nhs_nIgwJC{`390B^SBpmI_oFC)&aN->LB%5WXvg z0RR~P*B0~Nr^dzU-`JR&{O?abMIG86`2=ULXJ_h0$JXHPMGy(1euDG>G10m)ZH$Dw zcl*Rq7)Tg`@vn=vBbMW_?)5D}B#d+ejxxd;V&$*-Lq9S0~^=EU!d-|yP1-&gPVORLpEAz0u&_6s=IsoQx4h=95+0fgZ# zy2?dd`GK(L5)rUaqz!(G_}C#-QTanXD}n^7c_*e6ZCi2Zje2ryd8VSdcWBCrg$ff) zib_H|Q^hvoZ5&CFe$9fq8K{M50l1>VNak$$6p9x&N<=6)fwur5%FHkd=98-mLkX%% zLxm(KE(QSbxR}~?hOsh6G)aniKqWxc;=;PW?0{mcJ-T7=kgAMaMI;OElJg8Jdfa(u zBDHjt8-W(WHvx3~^0FI4`|&zR5>{WaDV6G)7=Usf@Z3*t0t2 zMKu4=peJRtn!{o9t?o1V^X&g>m1s5r5m2@0K_Ec=?m=TXtA2J|%mq7bOe}>rc9~+QP#PaZC>RYH- zWX;TS2JA3F5IN}Ttx?2Y{1wzI2R`S;fC;n{c{ERk4 zAi@vfrL2y@GV!%BPt3y(f4C_)3J{-{TfR~p9owxl zs~DFF9g!7OF9^kOZ8>J*T7%W&%yAV6nZcTYJRZAL?vB=xJPJZi@cn~@two?CG4`4b z*~!Q-5Z#Z-ORN`3!+~68j?itv1sG|=84;(qWhLwbR7eha$b<7IdXi<<&y0AKvF6R2 z)Zuf1Sk*-bGWRTwj6CG95FWiCChrd3FjLF0ack8s1sQSC;DP@nOczO7776!Hj~2*h zwn2tDR2KYIj@=Sw7&I71UdKTm%1IU<1@oK|GgLy-^GZgH+_;dbDig_7Y&ZY3# 
zr}bh;i|To2*y)-kXz0;E@}u?fA~{qGV{J7Ow23lBAi_int=#NXmWs0}ibC4j3M8el zOCc|Cg&>`?ed56sQC&FaK;@E(gw!=iGzJ~3V4+(Pu`yQEjN^Vv^F3JE|k8-5jvYS12ShcLsHP#1mf*C;;mrp zQ*=1GZL*6($0kT5)aKU1n9bV}s?ZJV^0>Uvzztz6aMF0kQ&EEG;Sr=g(P1HX!$Ntr z`*yj}1GyICWNM~79oEIV@cBA3z&kOX792j^T!1!Ee40^w8Q`%G*mx1xI2rJGh<7|R z*f{8)V<3O+aqtd(f9zWSiy5u~eI|5k)UT|F@1L1%1v;v0W(J%Y15ZB^&Px%^GdDm_ zjLRnazQ%=|cpqgduTsaO*b2%qF? zd*ZL8$1Xk?30Vf9BSZS{)S_fUeDtD1(irfdHqv@~(*5-`co>49Fb1430?!R7^ubWN z<8T2%1j>;mYITDmqzOf#Kzoxds>Ak#$uv>W#n8k`pu0=}&nq?8%#}fjz%AA(+q6w8 zBjdWWvJG_xWr@?sbXq9>&7_A2N)Kra*~dm@L$n#T*GN#Vkko8Awguv$JN#H(1jEvY zeh8P1Z<(q?stU20cOApWc&)<_RN7tl&QG|r3DU65C8)Jbqy3`?=(yv+_X4qC?B$pY z{*m6+H2EE@crz_ug{ZBn!()6QPgvlDmWahisQC^p^$M`1aDi?7_a6yL9-^#Q3zd`a z35w_e6ktY>+1~W0p{T71gJ-m}rOWn+=}34~LX3{SjEvdA_GU;AITgqr!t5F~NLwHT zn0<9zGg#m)2gFB8Gf_zZ&CD(JGszy+PaB~31-V-J8x$Fo?K-AM|9m?c?+<2}b6l#i zIj|3!`&=go_ZRzt!UqEW7<2*UIfiDaL}S#gD-2nG6QrqRE0Fkj`wOy9>)EDf!oI~9 zPC9bUu-sddq_J8HVA%$pt;A3#X$H7W4`ErLC9{Bia&@L=17b#`@iRDFs0;5Y1U2ZT zDa%2DltdZD4Y}$=vWz+Vx8P#|?P2ht_5SU~C0-08nG4mY^Z>pjleabQ*oQ$=*P3(z z-vG9Dxo7y0ZkgoGgkG;)YbRbjHts~_-1lP`0&p_oP7H55VPXZZ7bpZxUSUq-u#|V! z3b1kSgNoq3@TAB*g3&>j(Z@k&2s1w6n^>iy7h8%;GPCLa79A5z= zr{TtTWPG;ceGA_21n6);CU+-D$_@no8R^^%^8;`uTeK%>1JnWxR$)f? 
ziIE2|X7T_VxBW5g?*<+RItDS6!J3X^@c8;1G^j9+WE1G1cLvS2x3KWptyzbS=1 zH9I}=m-YaU-)kOYarKQFW7Y9sU3at9m7dlLta^x!-m?thCs5k6dVI43l%G$21pt-C z)$=jcl_8B=vjD&}n;gsR9*O3geQr6MDd_Ju>?k2CHJa82dTm|VVvIU1ktMjjPNmaM zqoGT9nG7shg1^b2cbNq5y5%`izt^>L`J1tF$L(@=46fT=ak{fswQA$#Ceh?)tMN6g z2JiQ>sXEKIb(7cQcJ@7(vTg*9_to$tJjZbHS0`q?9UZzFV-<@XYgkLJf`*i~y}F=GcjTS?FR(5U97uZ+%aCx`uY zKTXc{#@BK7t9b2qG?OlOkI8#&<293vZ;@b3jhT(t<7*i2kC$98hPTUNLoS`G9#7k8 z0a42<^~J~YYS7eD)2+oXv5)cV(jXOE-G$Ej$?sZ3))1Lr)8nX-;J4HaiIwl;MM_#8 z%FC{+#T4D1wyu|CH#)fvqPvx;!h+v05By*2mO9R1ANNYf&zyuUw;j);ur26}Ij>l` z23)Sj!@RRuykynI||u*FQWV9=Ek2VX@P$c$K;y zx?2nUsk!ge3(qHH2jG6rdP8+?vK}rzJ}XRTYt>qxEN*vKaegW;`-b-mQ+{1k9fx~o zP;F~E8h=losg_>`j_h8G!RT@CNE=@R{3Bqa`#4BNO@UjEZ}K#g@(hqhby((|m()Nq zQ`^8x5sF?z9F<}nI2Mj~a5`%bzZ(c*NrY0#1!@)(-=H&&!z0af)jzH`JPrmL(ten~ z)}yS{^n(J$Me!snhJw53QV={u9k+9QkV^IBdcTI>dv>=r`OZPT?iWKDD9Lx8tBzk6r-`Y ze~Tute(^#q;*wVHcBXcMQ5XGH7sQ}E58Ha*5Em$BKa3qm+#nZ_GFb*RNjI<7D-1CD z&My?bde|E9l!<^}#iAp;VsXj;$*Lp1LT2d=JU?G^_I`TD`%eXvZHA6f=ih?+{=ZJl z|JsLXYhveYXJhc6*Fi37kpKEHF?Lt9q^vfu>RK9eh9frA9SsY|HF0FK5faVF8Z-B? 
zT?EA?D%7pi!g@AQx~j07F&O8El=3-BDd>|~ml7$2FvKxxrQv}BS}DW=7+?ypGZ4-J z0t%2d&*=+(wQ3^rN0|Cv_kO>BUwePuxjcukM30b1df1b3wb{(EOd=nyDDNQ zOjXrj=_fa`6>5{2SDrhds?a;CEzn!%$~QGij6bGA6<3z4JC|N6OG&Rl<%kKF`A3bV zzOex_RB|XTJc1}HHAj>fyFG@er(B<&RYgZyGpzzRgyzzq%JVmM>r-KR8VoT}YK%8j ziO`$2Olm4WkXh)|ShnmiRG7}_%^lf@o_%O(lbDcO48$6esaUT<;cNkv7NKD)Eig}E zrczx*SW~K>Hr2QEn4lnvD_&Gvq%v3V<1k8R)=gDx#-}n%KB^-WOA0A{N782#W`_85 zXsf6vvdL$HomN>J8RZ{VB~L;oRVrD9vUx^9%Bm_2WrW_sRj6VdGGPJ`WTh46l&6b)d*AL z$_RgxP+3)mRFY=tU0{IExhG?=Nq(q(i0>1L_;|>)+Niv$i`1E9%}dtVNSAl6Abtx{ z5RmJw5cuovV&=4xXk)cNl?E1K>K{|&R({I_QUh9)c*x~z6^~$~kCBhuQ;Gt~;!_7> zJb0mlI;i&xjV}lZ?zYgZs_m-f$>9U z0@~mXSBH#x=Dmk_d?vWS?}Wg)UT1RlJM{J2Ix3o560p%b(LHI6q<&WygItJDN@*xV}TKC*z<{Smy(OzfYl7DKJkR698n2XE7wU zEggoeCm?gy|K+Y-*_#11Z~f~O2Nq$KWX|gUO@JH^l8LfP_^;FC)_}fkroY|?DlA@W zzn~My%pH*O@XvcK5;4ZbuF#2J1q=uJWMn*7O}QRE6tz_0p_C< z)P{&VFwrYAh?Pdv{;q5#fNJP;zxMbWkbXB(dKA0=>XSA2%q$UVxdht=ax@`VlBtkB zHi(hCzm|Wdh%W*`Y3mbp#1%==Ds<86%-{a@nX(PsM!@@E*i$Z+Lr5z{F|Yyvcx1HV zb_8keEh*AFL4G~ZUTE-kh(lp&I6|}AiMQeqgb9<@ErC$ytbGBJ%LhV=Tfx9Ki2OwW z6rBA-fR-{is1hbI1W9P}s%sSXfLFhV#2NpwMP@k69R5uh~xVnXe_ zzNDxHYrNTPq@2Fi|1O#_=(qY0VF!vh2>T@KChlf zY_9>v71MwRG)^}o=0sdR9y`~guCA#r-3`}xYvi7=9dC&1R$Q{fbaIY!9lrZSCw0O+ zAQzUnB))UBA?8wSwcDzcY;V47v5$ChFlsNC8jgL2Z-xxnVlBsc`8}Ifb>uqMoq>I? 
za}yF}K78q@b9UaBwnXt3%7}l8^0Xb^2P<0W_=X!|dUoWnVYi>RbIR&)(%I(v^xm$% z)T*ZPd>X!ofaS8W+;uqKL;XhX{M?4D+HNm7xm}!}ZBE+se#_2|yVP!a9+VI4ZdRwUMh)?=qS&AxkaDn~S@tzWsH1aZmp}Afh>| z-TMJb;qrct{qt&m-T};m_WZ6{3}1KkHgk2C8`1N4FTsQN6o#%1egH1)UadhE^P$jwh4_h*3dp*AZG zg~euRz|JeGqiJl9K20~J{d(Y(%a!kMe@H8o*Z};Lh6IEO4)|+V7 z_lRwVPG-m0!#zEpYpgLn?%o?sqZ2Q_u2X{_y}IYjir#3ytJkPW88&U-H6NLg3OU=~ z(}49KE!W+qPrjVVQZ<84(P-?8Pl8HU(yLG1y({OP>{>?`$5F5y4hW@|Js#rJ?6vl* zCCJ8bPQTKj+Fh{4+sk7t1ZRhF>sH0HJ`ASPxygK+to!R8S)$ZJv4VM5DK}l`+>hiQ zk2~X(2kbMp-meEeTMrL!?-fiQcq@sqJc6IzO_qK_Fx3L~voX)r%G9aGjwtqcycK(_ zjC^#L>f%4pI+^+2Bs^s*C@KY*Rh-&rlAV>yS0W(3`Va6rouG_z@BwEtXe_wO-w(C8Ly2hhNm4uGqu5KzV0B;~G z30B&Q>13^=bttPu?$Et;^-_8q!?5F%92X@DT;nqGQ%7i9@ae(&0#eXBdWNZ0rpB_T z#HSgZ8h-smu7a-C*jd<3H;$)&(f*U51+C2%to$Qr^ZycvaQ@rHjh(Z-o`Jpn|0>WS zYOw!!8Dj4e*XL~H9LGg+I=idm3k75>kyPXHu&eds7m*%JK>Am+Jk+EF=SZX+=iggo zaddN182x-cg0JG{0$2pM^I+Ma4C$?T3nm*znv!_{n&z2%C48ZWRdhB^tX~M_g(qofH)pDI@Eq;)l)xQ*19(!%F5Q=4p--ak5HD8H~~!hEd=x z6`nK&n;C~$tc=83jTV!+W`*(7)>Z43>tfqtl^mBetH@G?Wv5gd4l958#mdFbQq;?B z$;4KHd-TL^&Z3LV8Wk&0A*yxh+IW_91{u0l1R4oux!LkrOjF&K9uXzr`({&(mCTB& zkeb|9)>MDnwtp0DQerB`ii>)SzJ;T{_-wHvX6l(_CdXC|^ z(O8Lf*?@$p!&;Sz!;r4rBWC7U`b^D)96aHhjxgaI=|Dg&4bbY=o^{{>3nU;_!| z_(A>&Ux{LP?b!1nD@32^qt_3T591nqj$ry=gk5kBcRe22oZWutXDt7tpb!Ps^+g}t zzuA5K{97IluTZ-2Jx>=FefW#n{$=q+Z}@;`;Ln~Br^BqfaouUc?tSf=$KQY3L*U)N zzqm7@^+otsfb`^?p6UrZF1^FG@aMIA@F5|Kh5ebMYZD`x-5{%(fer&Z70BJg<7e3b zue|@v&$8+J63nnntB$eh6$vrqO3 zv`$5Uisw(PU=T%*oq$qT4KfAkEl_P80EbJ86HP=t8sM4eKfT_-%AX6wM3rqE!lrJJ ziLQ8yO)r@}4Y&r8bCM|zjtZzpk%NzcYfPHnU zb&QJ;5eA48Uoilvu^w&&V_+)35iSyzB`_%I;aKIbLC%eSrsWR|@-O#@2`pX8>fo*a z!QVsWCEVDQWES2fqUMW{UXBBJmgWzA3IzWV2J%o>!7KPNWW?ZZN_^nIt2nl}!VW1w zM-=|n)`uw(yYRB{ZjZ^Iu#>3lv^7y;JZyYM0%KbI6K_Q-W?dL|c)+&I5B9y#k2e(N zH82`oV5>BUbo?Yef5wjn4QJGO3tQ0WKltzHdigL2g2rTs>k<(3p1)Y55*~z(53!pD zVm$C3sdq*dIY>tcy z(S(SyGtT|Rng!y^gBfo!4UKyb{n-H8bJpW(k=jKwwIrzH4NwZeqLJ2wopkiEfsEYH z_JZ(T_>eHlI3u)_ZP`#U6iGSmVJOMYBc61M@2Uvg(F47@-gt+Dz$0YC?U7Mr(>chQ 
zAbL_Dd>njP1lnN`konb@Mvi`?>PwLT$KI#n!^>k@J0iX^qK~8QW%fWW4J*F}NBrk& zX9L~&E&01^XM>hIu8#R8&Ne5S&PT!cPdhw;#|78QAdMA5eht;^z$_e`z^T5{{Hb03 zn&78(rVHD-{5`?9sl^40XkRZ5H4pLYE)1QG`pSspNt8nqZ@UtD&W+?$>vH`k^jr@6 zYJ}|Gt@1m*v|-j^kAopXYpR4yNyAMi2O&_0H>r79a0V zhu4cm-{*OU-8BTfbZ};z-~F!sQ5DgM$zFS5J{opxW>0cf#bm!EVeyFNb;VR!1EJ9IgG?%sRw zWwXqlY2P2_N1aZt4n9BoW$-=Rzo$-Odtd9XuW|4>uh27iz7HF7eqQExc6F?|TwdpP zJ$fzGcRY^??2bYZFP;9B$RibOX{V0ZYcWpLF*+pBsz-U%)$ zZV`@tZU;}r7&oOblRDfFO49+`VVo|xf^ZCJ1mYThuK-$h>&8$R9%M? z7iwqC&5TDQKBxCbkH=MpuEr`dYdEwn&F`%H>f3#n%Dcv`!$-keIaKV{q@gEq%PJ3V zsy2&;N7p;LSygia^E%d+En9T|Rxz-gS`RQvnrdQ|6&}o>f`lFfCPOAg$*(UyTd#NQ z1|RsM{j^nCaA~i!4jjm7uv%>P0sJQ)KDUIpFbY2&RxU zfe83DTl*6gH*MOwq_S>b0C!|Ol2comlNVEIOq#e`Kh64Hw(hp$`LsvSi!1XxJ|QtW4||@6c}k{f53)tx?W5_hDr->k9pVYD|Z(u`}pHmy-^jd~eq)p4tg;mq=v{kxcq!wGN6f@uS! zb6lbAjJZ_Pc;hU?Y?-3@O07K3r%=h^n!l6TY9fwgz43XIYUK2puanE9GtAQO+@ogLQYWD`M5kJ_rFV{x9$zj6&Q<05;X>l~1<(Ml%pxtPzCJS@O20;dcI69} zY@TwuREVvU!eHR(dVyPm7{g<+3xp zMf9FU+Ey$UQH<%9K~vL+Qi;%!{eFODSNCi8nh|dP4!Npji(yd+P1ekZ6#;VvPGLBu z%-W`Gz9LPVP%RIFtB#VZ)goMAvMTh5SFGi5NYe+OlyPrXK^8o97VohD}8 zhK?*&LY+I8|DaxssFJPBS{K4;c)105Bls0;1JrF3{DP+BC^r%ia+OQ`YUxb~E<&Q* zfgc3J3i7K|yK26@`tnHO(NOLH0aBBeO;{nVysfe|;xi!_g}*^zxYM}BmXb(k6f|aR z`H&%I=mWcDTDP>386YX9ufJgNc4U z@O`mT&lKuKs32ot6f5k+I6fL;y%CO7C+em-vDL{Njt|ynqL0K0Hv(c7b0V5PMn7Ys z4jlLD=`jnG*zrJS52c5Jx3#oeJhj9u{CrGSH?U`t6B|JN6UbCz&KgMmlU7xdp996& z%`52{W7ycD$}QbNQ)7;t;1g>qDcyJ}mQwDrR1(g+*X{utiHJw~lAMDQTnQiBS-%&{ zaB;iu!Uiz+bFVRg$;bt2tXn>N;D;bK5r(K>x@`pg!@FZU1n=JAnmjN{-lrS#Q6Gb= zcx0bh45`d|zV2qu#9zj;tF|JSwj8beyP$foMQ081Ru<6{kHVd#LGr}@5Q)y-+ zTz$nDFqmE?05SL@L!YfC?+P~oIIPVgcy+#Mx&vVrF=i=7sPTsv?|ku~pva%I&By*` zX0S_%8JP0no%l@R$U;e`pIko5MTez2+Ie?n&(%FA!6aRAZn`j(BM+CV5AVA70V}KB zgxT~|LOS^$F-|MZ%=GN1*WjYaXK;wrG#Vim8l_%UCK9lb-*O4+O&jwC0E@~|VBQ}W zY(NXRq)vl#FdoDC0{u`lXpx-}>e$GZicugav@G8hrr~WMBeNZgnx3Y9KF5Mu~Bg`iRNS&zA9sCttmcTFSv&_d*71DTfU zQj#;3zU?C=5>()KDDt*)ttIJ{W@t*LL91ecv;nV%j`Vn#h`+8>p%p{hV+1h3LJ>3r 
zsMT|+i>M1#rL=OG!nbM}57hFhfF?2mhEq)5DVsb>;b4Ug)trh9Ckjr&tWFM0DXf19 zlx)4kw3s?KP*(>ZK=MTZIQ2~LR~}U}>TX$vIQ0-?pHh&t3e0=GvkK4IMG`RV@K~V2 zNtjBr0+KUSX9}>RH&ynh2!?B)$wzg!kn85GcS<)hdo3Ms}{X8EKiTEQqYK7J=3TeU?b~7h42$Bou~=4+T!B z=_jDBf>E5+31}G*?It?_dsPh`rb${?yYvBvGyej`GKa`DunSekV)o`=jB*lWlYpEW zK?zVIscOD*TqrmV>eh4ZPcvP5V8NX4YfB$yrSMulI1+Mu+L_ zu^Nf{798EcyWyk4g~n2!@20~;rW!*Y5a29zn$iR#X%SQdg~2V}{(1xr%=r0PArjY6 zO0pRT+y#Hnb<@#7#WRVFMkmteW+$=LijrMwOYNf?w<9$gPr`u7 zYno=%2FBC`(V*exsO8_VkgmoaDXH6@pL{*Cra!&Lqr4IP9OJiWNI#U`Q zSHn;A^zUzwrd;voP2I!RUtPfqWN+(h0icb&ad)`9Qj@9{Av^ zdT1LAsOyCI)k`b8B#=%nR3dah-2v}&zP-~rL4~lCeLnIli;=w3aQ~I_G9@tPD=ISx zzla(lRmykY4TAjg0^+U(jjcfX##l3;$YXov4A(lm){XqwhCAj^F*aIcQ$313;g+=fR!g(T_;U zT~X>F-b>odJoNIMk&lfFo%|Awh%tSG?Xb2jhFMOK2kbM6qQTg2lei9L-b)&ikdnB@ z8W7$Om>6G0q|bn4d6&_5bUMfHBT3$Gle{j4>Fda*;S%cyyThBu%P8r?hc~<75BTPVZso7#i)M7N0G6v0v#G_?k^nqOKbL_ei1OYhbe`> zcZf&~lfedv-Z`~w=DV*J^E_pA`K2;*;1zRy_k9reBT)k#w zx4pDan^-L?q)?C26qlE%mSwf-$7F3(XBxuk*kW#M%r3nwsInD=Jm_p7X^=Av+C~RBN;Us*PDG>}+Lg^@u)_Pe@$G;G$E4!eW%FOB>Kub^sP$J|_2Y3v)gD?L51WrjEUl6TMEBWVzFLV2cb zM!vna<@~^{@A%{3%|3MZbohoVdGPw23HF;jdY7_ZHY>(_a>u?wEaGMqG7vW}A!e_b z?T1+dm02uD1-e&XN<=mG)Jfi?vkWL@J$kLZQ-vH(@z}&Dcs@Ssp`$W`Y%(*1)I~RQ zNAM>?_X5Q z)&-A3`b}d-fc+O-MJHzElEJ4l5yP06uyBWy~hIM3i+Wweg z!H&9z#O=EHxn*@XwaYXlN&{yi&*Qq&zbvJf+$mnS50q-zFjvFBk}oN3e8ZPF^Z2TG zrhH#Y!UKx4Rf7w`Kn9{DQT^IbUb zNo>nO-R6P6l99Z;Gyi*{Be9hO+B@>asQCrkh7b5ev#+T@VSHmMeqx&U;PZp>=#0ZCN=oM0u8yn&Y zYICpJ)5Z-(gW%+4#$uA_@L{^Pc&%xKv9asA4ah^K^iS(Ju6Y#-QT*#_tk>4o@SXqL z4y~_SxSrCN>BwFF+f2kB!9x~W-;kjF)YarKVC5eC%J0H;fhrbVpdTSb^dnG4Q<&wm zX@n1OKRCdbX{evnTT5P^Xq1O$b9x2rm-8gu?$e<|jIbDd&PLd#g{3@T(t6wC7uIpJ zA6Q$_{tF!dA9;hF(K~+X-o*Xg}8tvg>Lft>(ZCk@R{Dg8G7YYp9>aP z2a0i_8}NxhP@i<#vB`rrZ;<9Qa72zk4PxsTpbNwyqFBlwY~9kRN2qn4=;BAq&_2>4 zMB$4fu?q5+X9B}Ab53Er@-g`UK+hJX{0dMSKJL9)G{9q|+Tp=x(AJU5FQuzxq`HM27Pe>PA_X2_Iyhz$y#$l|S zXus4`KGIRU#7I8ySFwR_>(Jl9sMt0H9J>mm@K_%|+55JaZw_wma?J4Lwo|dm4cSt= zj$U@E2EF3#=%ULd`j*7!K zmv<1axG@ayKD 
z!;j}H6#qt_kf;IvS#UlzI==MM-3KS^bLEjE~)i2-8Hb?VdXV$x%Ni(2+wO2K)9*pmP zw}hAFrn}wuiwJX~7&XC#oB4y&jMeRdbmSQAAJ(++$-5WUFfl}+qln|Fv*2)h3YRS% zMyti+(P{wW&JCIScRvWd1CCb+r*Y;|>hbz|-_Ip1cFmogO@I6RVE&5~KJ1ju>t2h& z_G-UlY>43PX4hNQtlgZ;TK4;^lM2b6Y=t$phvDS| zJEpqvMDe*^5*#0Cgk?;{YzoGdZeWTaF=9YT^FpH)LhP-WDkPRH8C`O6JcJf22yP-p zIOVbH*~zQsq|R z`-5Jt@N03d1~@FhSLwGE&jje{Ywlb_x)0k1?8(@a`tGhs3j|2R1Y2X6^aM}W0 zlL~uj1pJ3vYbSimpw&6lKk&e$ARyy_%0ImJTJhXkHMROmSxZ8^X+%{Riv@dNzwmXS z!nj963xy8i(UmTrZa6n4=Z2{Y)zhhap^DtmK8cmr&EejaH}W+D?Y8rPv!*1V+e8)x z)gsdtXS53|LQ`i@S5bk9tEbkvxf+ArWzVnY%?_2{=B%d@*0^#cjIbn-dvw$_;Q()= zF}T9>$4R7*54~H^Wkx(E>l9iye;giTp^#}NO~6J$aqK6&TzdyMC!)u_9{AL~8wYxs zCn%;wwcL=Xqu8J^bHIwspc-HMXzo)oWNmO?0nA6;hdLD6F0cd= znO*ia?@;8c3G{O6GSU*u=vk?F$24t~bbI4=%20LdxXEU{8+3CuGmGT&WUEgs{~x%_ zQP*{PlXvT}rHVv7Ur0`c2``oS`i3asTi9C7@H5E=BKcS6=TCOgagJXUSiEFYsX26p zJZ0yTg2E{ayeNh##7sp)#gggUw}C;V)06iit;Lyaphp*Ch}QG<4>G} zWM###7RodO(_E3Rm?Y_J+nmEi^hBtOMjXtMuIgI#-{QW*wTJ-&*cR&gyc7S}@rjAE zM(on=S(%|%O?o9!&4?)Q%3OCss=V{~l*WnzdcRqgx>m+`-HNLz+E(N=8AtURj&V~DvcSYc*RtjY zE~N{p_Qgi$(|eXifmhG|UKv`76E=mU4eDl8=6-Wux@&GbY}hU6nvzFH8wtfuH-+&m zYB?t2rVP!2qX!hfgw=H1OiREow&9YF7)!G?lKr}Dah`JPJF$}m%dor?ByzL1r0k+& z^qiR&&@SVdzYcUi#vr<~v$s!m@OFC=o@CoE@GkyYU6{34yJEtD<+dYJ$|4A`V@qwN zge*4w-pHcG)Nl4EHD7Kqd|514>{e{f(f!S9#Ir-w@wsCs@~NsxxN*;CSybEJx}Z!? 
z>^fmd-i*f2x<0Dl$M<6deR$z z@A0{5drU)L^)SyWPg!GIC1B%|V83wJ_tn;RNA*&GV!nNq?R+gY>&tWdi>a}e zpB1NXr5jt$U|?gJF3Y=#6_i|Qij7eHt=yHns z%!H@nq%tFs8gjU6T{ENMWS861*7$V{f&Z`<9Tn>9qhE=z-u$v?F#=WFwVD9-zEp~r zvuvHnGV@@kf7dBY7{yS2x?gh+x$B3vK8Z{F2p#S4YHP45DHeJK{*U%_o~+BnJRt6s zlt%B$a8{y(8YWxphy#5h#59$>sO&-_T|rK*3#mlBCQ8>?B)oQAg_EDtFg`l!j_hH= z4ao{>ZL>%z)9OQek4pi=QNrT%Rn0Ry1P=L{zUFBW>p594a?7nl2%+jWj1o#=L237X zssr~~#n!7MXoX2~w>@0oZ9FT>t8a+E_I52B`r*s({i&$mg&qgA+5M#615AI74*SH0Cn+4fMm&xo)3_9 zqmejIqj@^wL=u^HW|S@)8^X&{Li%m}pt`K28NZ;8p%ERDgw~A8geqiXFzt>i}yRTY#e( z9`Dg-XMhE*AK%gifOVNN;KW^z3jj*|ZMP0q9>D7cT%5lL_0pLJq+9e2b%+=LYTmsr zftl7?*0jj1BbEJ`IUKSdDKXDJ%||PA2Sh@@#!;t+{bFMo%?f>naBj&wP4)R+x``hjS%;uy9w0Q{Z*7+ECoN(;D`R@HSC5Td@eHLkbyUu|A|Tw-8c;A?y` zK~+gpe9O;3IRCIX{^aVsNcO&8--Z5RM41P!^E1u49%Wn^{RV+`e~4T@A9fw^==+B9 z{v-e&UAqJEsOK7$}e@gxDG))hoFwX$PT{e)&Q7$)IJVaSI?0|l>e zDO$9|C++tjhBPaDBw~&rEFt{^$|-`3c6w;<-+15uI0P1V-ezTk0{{s9n`mktL zUvl5T*~G;0|33#ht9V;sn_~LT-g5S=^*lAtnrD>Yr>?!9A&s7{jW<;LODe1uNp4bD zgDanBHG0d`)ZY$lb2?oXDVHMzS5sW_RT?Y&P-cRKHNZhpQ{x(D3K%pY8(yT9L_n_L z8yYdBfneWty%cl&74mSH{m$8ayyu%W{M>GLWr~Tj{S>osd#$rhLU5=Sg?MJqv`}GW zb}Z4yUU$gXGj3zkXi-<$1}!!#w^?R^ZR~GA z<*iM7mpX3lxlm^*VGPq8$11s=K~)y6Rh%v;knQZ~RF|g5xQf!qsw~wAv7nnSqsA~1 zugA8$SfD9WXja;W`>}1dni;FSp`SHt$K*l5ez*+Dk&m@VsXkwPR#&D}jyO5v*QPAj zQmL<)h*Gag`wFHvwz*p)4k`HrP;Y+ds5r4Dd6X+FGkz~HSWi!gUxmuZFm3t7^%PNd zV8+2=G@M#T>y1E*m}pYAJx1w9TCBWKSyy6qpC(xrk#E{CRW;v9RgJ9BjviK!)J82+ zbO!d=3v4pZTP&&0RG~Jm>Vck@NHp}GH=uJcGiRishiIU-R}gwIzlw(p6&jMd9+=9ae#;ocu_Y)Y zgNK!Q>#;yk&!exuko0>FQ+U{yz!cI$+YYiNpK72!BPDXI`J{OPD{36D9#|)PZ%*CN zF{PU{hg^;CWmG+C(vq(MW*19CB}zbX5gdW+5>3K{o4K+UfHDFt8Sb=e!}DWY2);nDvesP9Cw( z?CS*XVuydI9;ic**&48g+Jz!|CqIg3z`}(@)+eE9V3L_A+`~_>7m~Ts6Ydc{7-Vqf zgE2g9P2&+-|09$XF=vw#_&)A{l%Pw7Y*dON--SA-~mTelP25&ZxA$-7QAp?5(O^49})PL4gfO12$(zMCuDW34@<_Xm&}3U8v7Wz1x}^~?xzJVvN)d%?!Q;?G2ek-056gxeJY+vfZ+ag+8}`| zVuwLzF>|qe=|tS5gt+33=W-H$en=o#Vh`dpdO%qkg$v+#m{2=oML!l3^} z+8uW&Z(MtFu>@8)ar*BIJ&?!15JvV{xZlX)rdd1A>{RS4LIsIfd#>SfPp+NpKCnBk 
z745ZRZl&Q$v5Q9gJp~O|89(`oI~7E!O;ZK82<%7koSlw_h-G*-pm*juPu1h?r$acV z2*xx;Qzas2Z@uM-=!5y&xZKZ8>2-57UwnS|qpfa|gGXn0TGm$HyZ`q5Q;0Et-`w^% z+%6Q((y^oPceq`t`u3de5r;y9pYp#QO_ld*N%s04@O}TN&Lg+w-=m+0{&)!HzV+9* zJ??k(3H;a@SsuNl)c=+BW&C*j^l>wPgdVT8_%-8p{yp#gee1UbUe~^8b-Cqp%`cq& zjek;k%JqF4-+nAEeUSC(x|>>4I6b`8PRikSVO#pxE(Ufj)sg-!CwJ-ieuB;~kJ0tn zU+Ui#V&mbnGo9?Oxx+nF#eZw9HT~)OeB{@@_5L3Ap0&%=Ytc>Fv3{avg!+_UX1iKE$u?Tr#v(396 zKaP0`XZPx)Bi%`f^?o=BKE3&VwvBb}`5m40ar_LFJQRtU*(Wp|{mAB=GhLWJ??Q^f zb2J&^@LbrR^1o5YENt~_;=<_&)x^!dIYSe*b#jYp9F3lt0wxdy*JZcbB;)CnL7`+i zoFL}0Jx@l@CiqtRY)-|0`^G*9@b=jZxApxVi=Sug259kz$}RSS*OFrS+e*=QjHugQ zV_fA>UXG4t3FHhv8<=+RJjeN;$6cV3>Y5acFe#2 z9yk5luv4+oOgk4vRsz^NT*|ACe}}$0p^aPQ)p+FPYKuGaqLfwh>hk*byVm~p2Xb^w zW+ahVluCKFvINw4mdLf?^<;-*SWT0B0c4UQlhYXub{I}= z4}-eQgzw(~#wbaPW{}LS`oH0CPtcoS?k&Wikj510(7XR==k)=R`lS5N-U0~#0OP;? z6$d6mMXjmo2HChbf4dXDbL>aw|43t5n6TV2u`>K`h?(e#&xY z*-zZr@BV)J_TGN~4j3QG4ap+BL}v5i0^@4%Ng^o03nCCl5s@EASTUIg^qzaItLco7 zuxcnSB$j-Ekjj!xk4NFm+K23@aS@M3XmAx~BwI;g7B05e^K>iDpiC^htC3|hm@ZV5 zXpAt~OD8&}j2qGD&M9U3%bYkGp4(e!txO~tz-479Iuvl|*ld|R9HPi!zRej0D<4N? 
zEeTMjOqWhX#XbZi(4XN=^ov=!@+c-vMz$yMq|A&{a;3|TS7j+iM!>8bP;x4Ru zO-3ZBPB^%u{PGND#|!hstFo|>TB>?Y|DEH+)08YmO%~kK;-Oybd^pbn%_h*lM1>WR zDRK-emou(b3(L2IvbQD=G!T|@=Sf~j=Rm3BE6C$&G2+t{Se`j$ zQaq`56G^T<^WaI89b)c^53DjuV5}-zV^Lo|k%@}%UeKGgl^MIQ(g3@--)bAbY@U5K zY>k2jjFz(W*lf55(u_l8fd~PD5zt9CcbCU4u5p(KBBl%2+Rzx0N^WjuagCt^$AfdWXKq92M&g|QC2c7RH??i0Xs=xB{T zZc#z32#hnvYwL)EF0Pb^dCA;9M0awQCh6j*=~w$93nih?CJhS(;=vkQP%IzUj?NFi ze;_=o?^$l_o-YCsA|Q*Evq~>|pCnPT!Qm^9B0dT?s7rteE|WkkN%4RZ0)0S(V7^PJ zoum{Gjp9yBo&=Me8mi#PL9z>(#dAxK+9@ctN@jv6WE66Id@~zOKlEs2-$R9{?*!Nj zkwaIm{~S}3ns6c@R9j5latcHfSQWZICwOypL`B}xFGwb0kDGFav_UzJMYCD1fzB9R z9keCE2+e(s3LW+Y7e|&U`OK*L7^EOn1XA|zfM}n=GFgOCMSo)^K{}RF9z?Nc87O>n zF`h+ysK)l7!ChUD5t`?khAIdE91stUAh{*2)9vpbs0-zqp9k$Q1j_6o1s;T;0264` z8JnfNlQV)gZ_ou3m~iV3zzViRH9Ns2tcR1$ot!L_4g4m+J9*UC<$O8GuOi1U*pWV_ zn;F3AB%yDH9(M5@=MIsSX4@$56%>^|p^Ue50eCCWNSr$4oHD?(l3h?RM<^LEj9ObO z?-kX=1#nq#ChJ?f%0HhM{oa?KB}gh75EvcFLlgrn68;v59*z(jO_su+>Xd~hjklnpdxeMutqqP+ehyElss2y# zb2Kvuj3|&k5FxZCQCg%%u?31wuzRIHe=GVRu5xWAcC3~1K`Ri9G8N78UIfuI=P8Px z8aU=?u8GsplANeXwqdU=k=Io$Xop+ql2BZvgS17NECa4Ic0|G`$k9OyFEWnuGA!fA zI#>wqwG$+DA+?uCs;Eu2l{-LZ23szx8FoZE>${=`J?=Ju<6u7`4BeJo*6^fQld5N@ z>c8#C{be9TdIsG@4}m7|RnnT#U9|B@LC=76uA}<$tQR5ubO2n$;nW(Y3=+;oklM}# zJqS=R)PS)C*Wd@_usad4>f@2DK)}2Gc>8FCBM3*rr3#Wf1YIGVJ~h^*QHJIP^TDT= zuzX1Iu9x|eFr+bKjTPgri}xIv0a^j-dXsSrtODSK9tY}i@8sn22k+i8*8nSHaY@nv zn^Xv`*O&tP@D4cUuIO2LP>k4#2lbl(TKt|NSZImbfyw{@E2NAz#Z0KjS_4+isJXwS z_eCVD;lS6c$1WfM3IVoOCStGzaz<#&sX=Y2L1V$z5kY#;`iKE~YNEH{K(}qAT0*yH z{(YdLMGe{w9~3zO_$!*Mq!B)Dug1`K;y*ZQ=m`I0q*4OM0xASZVYKn80NR1aE(G8s z)8HC@?nR9Vm#TouhCYmQNKw^;eDPkGa}HCr02=BAhyA~^0&2F9yJ5}<^eJbxBU}_M zCOERta?~gBKAPb_8dJ5v7Nd++;2UxYkxE3jea!-1?unY$S3jN)1*CY zsbk=EIOp&NoDIly;f@&No-cVmh=m{(MjQg_8C#N43 ziz5bb>$jurv3j4Y`kO-Qn3jlqL~ncnx6($-#qv>qPZ3%~=SJyNUrSk840C~x`gvb{ z$eH3!zqepl({(Qs(ufyo&^>~!j-JG(7q@@in7l4eLVJ8SaQOHeuk4-;$@#k&95}w- zE62N?QzloAM&V8Ad+2j(y>7b8WoNDITXsM8KGAi`Dm4~-KR-fKbJBV3Pg5gJV#V>E 
zn>ST)HoAf=(ml?{vjsm*LrwV2c77)hSNRWaekR8UKVR8$A+}#a3s0L(b0E2GKa!eT z>309}m4;~iSGki0$`8-TY+&A?+ zPEKB#LWg(Qamq2N=#({eI{v;7@93ft2Kf3049`uFN-VDCtvZC{UG<;4= zcji{s^874!X5zUS+0h%nTMQc)@aINh8Rp-HEu=>n_+ZbeOo{fg>1x*DOs^oEeD<(e z5>C19wF>OLZbo0{z4n03dLhdJOwRAz8G1L#s_5fgXMs%>b}PkR7kVF$JYeMstHr$F z#aeSQ#-89`i@;sl;bxQ&c!D#DH7q6(m2F$d@E>suxLh~>ZrF|Pf&N^L2g-JFeqL&K zc6>M(+NsuFE{g3Y(3_om$p{0H3Dbx)e{a{D?F!6-*iT!`z;a7Bs@)jbCn|YAl&|$ zmMz1wnDYG(0_Q}ta@*CvBBvHS0091fo8=vht#qB8%&quHD#($IN^ZUqXpWSS+3y7gVOO ziO+&0?47s3(v++qoCG0}EL5O87RwePhF!#iLR$PjuVTI7IO^I6*X+b&I>Y(Oz03JJ z)og0EdZQC!jys78CJN?l8lwM)-x$NBZn=3xy7f%MrT#|MieBM9Ak$$ud)E%s#Ao3l zviPsDeu06evg63X3wN?bY2=*lTWy`rkk7nxta>=#&egmscZq{mF zW41WRf*=B6;w=YTyGDsAD)llXxVtD1NQgV@YWX_q?5l>=XnM^mM?xSyr*f^$C!r~q zRSb7cZXSnj-`=^j(~9|ug^txr$R3Ji2|p=L#oBSgc?vp`>U1-lN>;}!ClX|Tt7c&| zmEVYvG1H$F>5Z8-^VwG1MyY9e38t~3ztrNRLP*ffsNWvbEdzqaUro+N1q!sI;j>&? z4Uuz$A^`B`CRG*mQyI4ej6LTG&;m1+s*`9{%@gsQ#I`Y>B&wQJ0}=vLDihZ-Jz>ZvXT~x%Lyzo%Qn7L4u3kqO<7siyE^+-5Z53 zNB)3=ce;V;!%~NiKt?D<(Zl&~Od-jI2z?CtT6$`zG-=D4%9xjBm2<`nVWVp7bw!0B z^pji%ZdDbt`G8MbK;OZhbm~_WD(QrnnDiK1BZpT#7^UQLj#A$X_AM{&QUSaX&V4c{ zNQ6pfeo!VK8Sd;Zz{4)jR2KOf_pJogv z3X*j#Z}?Zc?ey9-CkjvpI&OTJc#aR zoYEeN6A&|lJP+RnJssRjv#eIt84{5vF--^>v?PEP+iB~DZmP$QPgFizEN zP(93=1B3$(DQ8~?%v>rkdazwMC^Zpd+F~STJ@j5G`(EOK10@plnUlYadrlEg>L3j? 
zHGml@z&5M^4qEd2zGXH5SX}(Fm>m6|EeQzmKQqcr;qf+ND~n~_;KCi^45CueUzLQu z9qnCBDCg@GWa|2?XMO&E8Dokj!YCo6Swl|Ou5ZAK z>@#*=S?DMmbj=m;ad6^U;%=>BMyT+72Mhs42mZhi7CeVr#@S@qUOi#~CiXAu#e+qD zhz*AYc3a+U130m{gw#MEq$Ae{{ut2mZ9_9&S*5^(}0B2<#>LH4v!HUKA6t$%| z$q4o%H={*CI1i$)>1CpVc%K*()GF>@6F7qSg@E zsxlHZIuj6#C(-^(dv{kqV8T-|7&n9>BqA&ya?}MFsz$hrQurEM6pgs4DuHZXRM2ggy{fHt%+1E$xpan z1uQ=UF&wjuF_C<4i!e5ru4#H%D#v^wl85)RGfGG+_>;B>9yG5L@eIyb=GDO?)=kph|)}29s^b`ew=KybKMEUhwr0P-k0U6bOcKejuty9|r`8tH}@| z0SD2ze3e2B4&)L-X@r0;`rSIHmk9rW5vrL7o|zoA`5zi_&Q8|+V))hZL-2B51-5z5iJ#Ka?$2aW z1))%(;=5^<1dMUhOTB>?M~$#Tg`DC&EX$C+B+KHw)aM!srw$c~Iy8lhMeLI0*-ti* z&GbO(QqOP|jZLr6<=O>Y^@dbJxw79h)$Br?p&=K`j2VG%VCL{H3jC2fo}RZc5wa#= zZw>r>U*Fw@#GCT;c<wnz__05yZyPu9;RdC!+whnuL zW=z3neBOs&Q+`jz8ZY^MypPj$b2h$FuWe3EUw_^;%Wb#r4B_E*eGk44bb0J<1{t3n zUZ+!KyPYm3s)1kFHGhwwO>aA0j;H^QzUbbW)cowdA-T`X=${0J_I&)bm-A0~)?HWo zzr8D$E=u-%p51}V_TEn{|1ht9q5Aedj1%Xs%#|1VcFw);S(eAd=a=74s^;|eUJu01 zc&?jr`@UKbEi?Y&OZxh-u6Ez7G1pqTo7}c&>h5N29Q1fyEOqk2|K3sZKP>^{=Sb_~ z-UqwxzQ4UiDtUz_>Z=B?I{rmuYQa|3WPW1NP_QvX5dA;WDyY_fXcKSZQJe2Bo zur?T+RddIFee&CWO^SQJlucdPNf?%+qI zVutwf0B2X8lEQU2Obc^|ySC|g<-NR#qFcxXj zTD6bV@irN8ijDI_E4>-K9AFt#KTM{ouS+%$sa2A6>9$HvZ)Z<3@rDw_Q6p;i3~{xh zjDd|sDM?>pwXn_Yu!&Z=y4Kz7RLO=#x|zPEguBJrV4a@Ita^LMSlU?1a$H@baCYeQ z{B-yPUU?eU&wzX_UMckmkA9?PUQwFl;0dwKq&)#*wy`KdOWNafZZ(o-*7(4KM=K$x zY0VWnja?X8JvEa)Zacq=3Gr>K>K#LV*U6g;lu{s6}$6D7v#anSb zO;svu0wY)eBS{?Dw1U_IiKIqHP!Z&MNRjv=*n`D}Q))>tPp3FmUZ(3f+0drZ`6}HN zC-Z7c-mXjPZRbk+X^!WOZ_X?hHv`M7-6s>Y_i&U9wVb z!i2cfDOI@=k?|RfP=N%6tZHfb1gsN@!Z{E^Y$#B9oNnU3(4i2rK3E7NAXkCEb$fpq zQW*N4dmaw|pZKJ_NS6Se0tVE_Qm)585=!FKKBNd&2E_L>Ae+TPiH30x#L39w7+lEZ zY6QGwYsDa)-`y5juw;KC7_rU;x^XO`zzv{Q5#?A57}6y`P?evNISvs38PWMs;~ozI zMbuP@!`lFd5B7QNWHiFfB{^L>8R0g8^4HSLh%mxjNS9bbeeXaP%hbEyyxfQl{)QNI>+Oy`V@Esethe^b#rwnOvwh^-RN*2^k%@ zf#sDM{gZUh2bTl%V=v2#)xj&RxRT|g5Q5T>WKWBk?)S$~Oo}5jlmgy0mXIKM-BJ1y zx+g|E8XY3H6T*g~!!Q#DKt@azpdMf&LP*h(D9Y;EW`=>bHxznult`lgrmHC|T88Ph zaXBRJjXMEmoQDFq2p#w@u`RABfn@nxyy=G?2+{t*kkDm_(0qP42r1Pf%3hkHQ2I#H 
zBjZJa7CSg<*-~R4kfKBgNEH}U-`_d$ymqW0aMUbLo2tD}cTu_0n z^(&s2TLJ(@lyR-s1TJqQEU@_34u=|3mP1j3@#r}!N;-gfqe>5Yqa^et3g-p6$VWIp zS+-t`$k#g}u-4$X0QasLysv#>^SspPu6{4tX-e7;`K!jE<*q#^P#{4?p8{bV!nzkz z^?uZFsieX&Ss=J+A}Va0UN{9Hjs;URnyP&|52dPZN(Kj{IQc{7NL|r@5=GtF0;VUw z%h5v9`LZXM4GX&pj2l`2_34j7@xAMV(Wx(06r5FXjzo((mO!&%G%~EKlA zt}PPoKrIzLAodJ%Tww|#VqzsCh(ID}LZc%IXNXcH4~a-p64zIYZ}!6^MJD5la!yxDNH6<0R8|fZlV_C!>;Jq>f(nChhUr% zpG`x6Mb8l=BL2k4r-GJFBv=IB_d;B^O*2m}()ycF{(k(t8Lbd0Z>+WpmVR7)PW~(wnVz?Cc3A7u2j?zTrd41^==+A}XC#GYSX;wnIy$ z-Jg+N08eUU5wxyXkOdf~2I3^mgHB~Hwc`yECcp-84w-;88ijlU<|i59)!9E}|Y9GTq&3P&{U(CrT3BZRB@?B`#&oYxrO`atDixb8H+Eep zc}iSbG;m$0_Tki&G4S{Fz%QDNOv*91TGr6}4F(N*gC)Ry8s;|td0bm)Xtkg!vzBZ0 zBE(J>5W5E}bX&tbyuQ2z!HQ>J$=#-Y-6dc!IIF;fv=tc}eY>^F% zRGAkTavc5Bsem&MA}jV>eaOcJ<#4k`fZ$6y{CeqU2d5VC!3Fy#F-8~aLohWIfznaq z+%$50M0WlVN&pQa0w;EN%2Irw>Q=&%RTvlIHa$oLN*6T^kI8dUQqmqLsuN_jGRp1m zP-OI}nN}tkfkT0XS$U3}UL8;xI1S6Bk!h)lTVAjfmE$j-upixE-+Z=lx&MGYAdug1 zcXWh2Vq@=X*H{?q7K>1lg~C@s%^yH{SrIvn`ygwudw8__bz)X~BIDEaZ7*uvIG%P# z1aKL#PDY5j(vHx2!I(wu7yB2^gCehIB4hX8aFA5A2H-DwE=XWrJZ-bl!HR+TIuIkZ zKr+@nB<9V7eS`>VR)JeAc0*3=y&x#7ro1ZcF&Uf?XWvXz=c}4;Uwsv>c zOug;FuZvr-&v;a7w}?vM_z-k30e)5&d#afMUewdoXdB>IxOlncgQXu1T{$Wv0NUv% z8t-WC02Vvzd8mofHqCCz`0V|M&K?X6js&0moi0+0V)Wej14ls^V+^FU{ZJ)RL1;VW z++e%#cCLD5b&tSLE&4{!c;>SZ9e|2yx`r1AtLa2vU;-WSCo4zf@Oz*j%CwamlSARQ zxb4l-JIDhlq1?ivfv!sF*mFyLV-L0v8)6VQxDa>E*bRGw-6#fAgxR&E{6(>uy3ic~ z>m31d6Gf_x8U@k=Bk@J;&2?;Ag(g-Q*%=1iYIE3$!{2lAA-Dy*@>VO#ZFIf3S4po7 zdp7FL<1(pM)^Wov3_WH|p2&>Jh4_Tp%^gm5sN4>qGyP%Q4x}@HT$qvBy7U_NN|`Pf zC}DzH+zBTu&`iOzd~L>d=2Tmx#BNws4w+~Kks+noCv0bKd+~ret_Pk783kJ#1h?jk zS!W-bX;*;9Qx-@3=muOc- zaIHY5lHoX<2?xRRNxHtIOUQhXr?-omc@5yFt^dgFF+yDWh=*J&uMYZ4?PO(-KjV!O zavL?4i(+srtN{%#0}nTkx>9uZeXZV>y^e`KYrlib7?E;!Y=;O#jQg;V;7d$;zWnqm z;wg|-KeNOQ>0BL_g!1SpP8=q^#vCCObxmFySpwZf>L~R0QSgszf$ha@o zwWRhtN99VhlumoTN{;{1hRD$Y=64Hw+LUvi+KLb%y0ARqURKsqOHduzd4ZH^OnUQpqY_zH@71DSObx( zcD~aBbZ}!kxxNhQvAMrk1M&&D0zBC`{XxmQ_1k13LuaH0oso95KkCzt>kIU!v}a`r 
z-OaNBV}Dk9#B4*Zl3~wnS?P;&YuB!6RmkzEoQrtA8j7LwRI{a4J3ExXtXwVBUWaZ5 z=li<@D@V|+M$oO{Uz8`Hlp$%p3wDpX09;;s!MyEor(XE?sh%%#Hh7op&J#a11SdpP z1Af|A%L@s3Gm_03v<_I$5VmQ}5EIuCX^` z*Z$fNTY%&0%cYC0nXO9~>25v(9cl8c_4g%SREa;UXK>i|uNb%BF~O8A8^>EYODlAV zryiG|Vn~6;VmaH_!8VHIfay%8z{-FhYD94S-!m-?78qa{;`~Rwqr}^5PzI=Ryd-)e zPhku+qF5)!{L#TQ0VA^hG_P!#BUyjOL9aP7{)?i=MH~V z`|J?hc`6gexb@p0eDR|G`5QqQDj@Nn+aU4Jg^lPZ=*d1V?|WIpU>0D5ZUgI+&IG3> zA)6L`#S{sW1Gnq1t-5k7wlM0e>%Hf6 zeb#u))^-Cg0b9-9zlXlq*kI!zZS*?!DT%?u=J0wqypGd<@cSO|XRLV2s(if%{{DPX zkBVAY2{_xe11B|6HQSjhgGt!DoAbS>3!sz1F1B}&`ENcS4{XAi4B z#Q6Bsy*&*TQXVpQ`q(|ZAY-HNe9MU`6pwNR^o$%PC))HJA2Y|^(r!%2c%R?(u`A|y zjO@02RF0H**P_cbd9CfLdLDj!=4{`e4Sw&^;_{!<*ZNH1eAoVqsJwnW6yEx7k@0EA zh4X$bit~FO9IS`V;#_n-U7)+4^1F4nUalRC?sSz}pQZw1)A9f8ftOPK-j!^=&0Tr& z``q={U%b7NS$lbGY4i6q-5(vF-l&f-#rj^3CtSSl@;+30TkgWSU(&mzV};sWXSq>6 znaOptZmnwa{l?r~bL+1A_}<^dPORo~uPS{Pa!v+DmQj!5)AT&nlB;}uJze+~Kk{vu zZmPF>?5B4|xJ0Gu=J83xT5mX=TvYDX{xDWxapg{5;yAZm+E}IKyop(^YL7^4|=-p z?ztZ&bDwvdz|w2?yrPrlW^bt3<;lI>-`<*Y-fJ{nvg5{jdw*1d1}Eko^J{zl^tgH{ z-5w|l!zZ=devWETv0X2^+8p}SnMqc+&v;566~j|on)(=c-@E;$zMF>d{ZcD4E!nRY z7`xN|J|1aK^wM#Bb)U2rj_O=+4f3vAY|?3a8+YvQ?&^53dym%N=foZhs( zE>7XU@^eIP=3L>kw>)GTXLiAR$=Pw-CmPybyDdK;ztC6uJYLh za;O8E}Oep-;g=4`D$D5 zNWJkI#e6|T!R)&ZK*aF{c{?JPJ6=6shQsUtRzgI(UOE(j-)4#ojB=Yp=3g_QA$fr1 z%aiZ;vKrCvL@Ec3bSe@(xYf##&_Z0}ioQfRpF2VEncnupNxL2gQIx1gTPC&%crU#! 
zrGl68f1eee=6aSr^3%_3H4HDJfN!30X7EHlj0hq5j>^UN2UF;L#*fyT7}N_`U_)Ub zr8`k1T$HF46C;Vj;hvX7#k(X z_D-DFR^_O2&N9*rRX;a$T$GFi5u2A+cK9aOZSHH_8NI4#@G%O&O#cb03Z zz6U%=9MzwTNu4t}38eC&Gk{0{2?Lt;m8a*CF_q^nNGq#U$?F>mmnJ|d@6i{cRv=wh z&H0d2$tm_nSo%=WsVc~~AzpGVv2gwTXK{j7z=ss~pRiR50sw&UzaL`loy{GLbzK~7 z{&&abtO9MNsEon)np4w#>9VeEt8Zv^B>@N-`)q?dmbqztZER&|2pf*}4!uIa*^Z2{_ z{(IZyaZz1O59U`v@z`d$m4O57mj(u~*MPcopy~ysk(PvVq**jV3&@!gS3m(vdYE96 za6q`e6j2cFwEb|7Mmc^GS2gst;U_>1)%1==D4cxMHt|&aPZ4&}&g9t$e&i+wHOR0) z1GXHMRK|}tBbQo+74iPGR1}bx5LLS0`dSh`p&u4MzbR>E-w~QNML<-Z$2)BVk}Im{AkX!Qkc)6 zz+~XSfGJGruCCTytBX&s(~ctq5Z9}h2-uw~6ZHosLQiayA!cuC3Ne7(niPld%mmk* z`Y`N-QMxY2)}XXbnscDeNIXF-t>44|)!Y(rNJJ2%ih#+E4Ll6D=vp}B46lzlV2m>; zy9yvw(++$ixN&a4E?|rsgxhl8J*Q=mGOpzmLS++@`jgbYDzzUXYE-6!XYLq5$bY&8 zT7|pmZlK9FeDNC-4&LcZz8+4ajqBjwpLXhkwce70MMC58r}spgT9SE)HtRd&30C6~^q;sj4vrUj7@9|L9mKw{hkz_* zCS3?VD3P&^3F68)j*%CkGtir4hJ`t8z#w99QZOFX{t)T9z#L`zk1F}9ZdSQ1*mjkC zG9kr_JRC{>D9F76ZXk+k=;4ccTprlWkZzz@X0so0S(a1`vRWcR&PaKU-mpuTOX69- znw*Y=oy0T-S8VeJ*@8P<>!5PXrh{vHr9M&i=AI2iS+P?NRa+iHohwsRusNEYIP#Es z{q$W_s-bXV*yr3x9Y}KIIczpgA2HrYzSx=lKOC+-qnR-#;2tc))9%8vVY7S3522a| zAwii}KbR>qWB}D1uA9r>=Yh4+>VP~!MXcw)x|ot@)(4(*eNiusg7U zIb6<6PQMFv)9INaF+eH(U#{H!5!9u&)H^xFf0yS*8lg8FnRT6GZDwpa#rVn-b zEqWOn4>C`EdXFGg*tNHo9>#6pT%5M62ske_PItCE zo2`$s#~#FfLJF;^FZ2aX&-XK1M>v1jep`&kgR?&a+$$yDUAa%dt~TLr_`UYho%fR~ zlcTbZdZek@rr)ftl=Tdms+A(xZ80TrD#KHsx{Uc{e1$0EhS=%4Mqa$Jl+?BNEvu^ToBXSgdfJk5%DME6$;R{^NF&$elh<{!g(R1_}W157G(XU~Hi8KyPedZbSdSkGhU###aA#xsg`V zHNz3c$n9S5?$n|if2mvbgtW6mJ1QlRzF-l%=G(%Oeg_hNa^ALIQWuQ!tD?{I){tWA% zPXng9Byqw$UJpOKC)94}&zfM%$$K*d=z#O0qdm4@n*|q_phmsHhDcxYfK)YR8=bq* zCTI03@=6;#0O07BSHltP0V2L6VZX%7?P!$o4JKG=D_=FlU$tqAOb~Oik zWpZxDiVo)@YvY5?;YKc(jFuX~#J>?GOb#qcqHyM{anWYlTGXsAV2CZwr8GE6l{NdD zVvbq`$y&RN8JztyHq^ug){mj)vS#>%*M%@GeCX}8kPZYZ)esUWehA+a4z~b97oGOc zR4|lK&TM}Os%-}1s+dHl1>|3_RY<@FA7Bs`M)UH(4C1vT7#6vqh-aT-Mmm=Tv~d3) zUFXy!Sh#K3G%9V|wr$(CZQHhO+jdskwrx9a_3ej_IC1(9d~bWjjx|mVCUIy85Y>lyLh3EPC8wf5w5lda((S|4k!ipu3y!Yg}8yy?T({ 
zD47dBo7g>-MZa6q=#u4+r%S&l(Zh~7vcR7JLcw`ygnaV?^JC1FE*x#7vbFJR*GSk} zzf?oG*=><~>^79h9_^tk+}_s;9CV)5a+*mi$vbzfHo!U)v{{1tmH>&Q`W`7{wppK* z=XgCUh(FsFP8fiu}|LL*EGCa8g9+Ax8b+D zNwizt{ue;Z(w-mGE;;T%tiR^L5X}T)mnL_X3B| zOC(N7YW&8ELG*!U{xRpsRy%xP=K0gmvnH`o*Dj!nSjL2{t+%6aRxVx^%YfEiXVNU* zrPVqUU5qj4;=IVgI=H;0(MPc|nmJ|Gh=nw4@gKbG+?;)ZdtuTnD{XFNv9%%-E&d5o zJy}z9vc>`>NK|vG-?<>qF5Jb>=bup&?$y5{#sSKfv9U~aDuQd`4eRL-@j8+$rm_=5 zBTKCfnZ-jUzU6xeFW{g5F<)HA4$Vma%@+B8#ozyW8+LLrG`4X3zY#(m6&-VI73AI~ zx5Nx-#`EbJ(yZYPS*}aCRAaNpMOK=WEE%?}Ta`Hgrb0Gwf=P6@DLtrwme?`5|ZKoNYDV)6A zae`r)Bd8)WG2pgPc;HaRLbDkw-F3SutkhE|m)UBsTH6obOVfM=_10OMbO8-L2KW3F zA>B<=dAFR%mN`UG8?{wy-;qWbok!?e!}U^|X|}A~8V)FLnX1fV$svnI8os-Z$Qn?V znjOn`lRnF3AqmzyOj9M1CZ8modG0ckN03dOetqqcXK5Kd>2?(XJ?3XhkS$KU+yir# zBppW@voy1h+8*oFZzz^c$4YB-P>gBvu*sE7D`x8^YYr4Qwe{NN$hxRvr(#1#Zx@|b-zy6t%^Nn40T_NiMyjru}`GLh7;M2HHFh~W%9QKeK zUs}Lk(9uTXgdKXON3JRI+57N?>>_5wneygfP6T5MDiN;aPc-Ti$tj%bvZ@rS=8`~v zR2Z|Rm%(@=u`E9H)$z+PT*~7MM1qqlhvf;21ph2W3TrAMpm6e+B8wypC!7CiIzpTl z&-Q-t|HrVFQPFQ~ToY1eq?HmWXbP`DwqfTA$nb&`lTz z$W1X6!7oikA*c`_vnL1!r)bb8991a@3i|j2X1s->7?~{3azL(?y#Onl`6p+-=6+k= zOw{x(cgT>=HtARaIz`fcQwt{a(~P8@=d2%-VBTcBxZe=lHRl!wv8y52px+eOjuyLp zT$ATZ2OMc&5FTsR(1|^afz-cc)K_atW;pw0O)gM^i@gF}F39DtuO0WUboKii(GRMb zU9_D=W5}aIuH4^`u0!?+pJt-Cu~Dp#K5}apHD1nb8z0{(eRt}(?K5|=orZlT%n;r6W^|hbjp_98x-U8A`|wXcp*@<}UaucibiGA= zK3w;Q3kz$-N&#z32zfB?*;;VatM@|wZ1Lzm3wsXb%gnce;^wYcfZjD#;Vh>#WWwlG zk?fQ6*P#ZOTc4Qtd=ADVzJI!Cu^UnXi9cAjtSixZ;oP>9lYl!}97q(Kr5hU~H2m6W>g1``gaNg&cdU>6H5P>xlQ~^LO{RyDf2p zWn`r(AfGR{aCo0v%2mvDR^W8w;|#7hO1KO8l_c<6U6`on`TAHxV0)sH$o<}0E_q(W8+0t=5Y z3Fb~*Fi-))&{!ZKEeCEdbHJ_K?&2JPlI9H&Yh{Du4xOK>RC^_*hv`qeGw<4c?){u#C%!dznb1Fvc|| zU`Jowzl8?}#4Kci-kB3KXgjbr~nTik0%5Q=s2&6x;{XsQogSY7Ihr z07sA@D(pcBsNuJS#pLqAXA2yH`v5^n^ZUleL zk~Qd!HeNK+09gWko15x4%p5A}QOI`_)5}1imxqr0r3uUR662mPdk*?n!b;C{;y1+KL#qX%t&;&1}UgA#FIskhj^hLodA&1xH?XW zTD6Hh`Xt7xCF%l~{>13lFU50@Kt2sm zl0xe#U=@`C4{9tBk~=7TcC@{P>A||xnhk^pmS9|KSa3R{u%s%Sa!!GdCh~v7nbU0P 
z9}9(@Op(Yz0c@d9RygXrB$y5DwUo17DwR}^ia7&LgxOO7%cwoXjo+lX5Jxl!5t(3D zi{|MH7*xoOI47nAj4W)KS;N&26JOIkMA@%aaZxBuY6sa?+z!$m+;Q5BZnf zr6xs--dFQ7OrSD@TvJ36yaWxH6o{+UGf@V5$l0H637rYFMc`HMl4N zJ!rFO4M!v2A?=wpBT0e0W?WvllZhUX>X!|RJtMeiq&q>6RFexK``HC%*#(Z8K&{<0 zV4^31t8b#B6)=arW7Pv1&253O)xePiNQbmQBOE;-t|kdjY(U%uL8na&<^$iy>$8nU z0M|IO!x}Mo!IRbUC79D<-nCJh0HqC*T$2eQ;UzqU6JZf1!_TcdLAy8t`V~PX;u~rN z38nCXqX!+V6+oMy_gOaN7Pp9})T0M4E3ZLd%Bv*Vf#fkmqJU7fqb(WI2TSzoP}hJh zhP6&r5cu|($vAQdTNBFRx%L3YeGFmkfjP?ujsO*;U(Lndyzgks1|{J9z3Qg}XqMM3 zMx=p>DOukHbS05l#4Kpdc!iu)%avxp&XoZ$xG^Yk;R6!2LSX_V2+D&XqaToPG2lUp zoHgp9S`yDpx2Tn4H2m+R0Xbaboumqnv`AQdB9+NBHHwBIi6d0#U?Gy{XG+ovwIO6` zd6_#7AgxurAkw7zxBm#{;5%1FATG5(G6}QXc@W4ruwFW_Y7 za|Ac{PEr67lIM3lGzl2Y)9-OW_AjdRLoSw933AATl408e32|^q%}oij3By`p#2XKy zYZfAD7H&XpZ?PkA0p!5&n{Gv#^2=W-*9^Dk*k)k_ZD9~kpe`8Yq@S2_CIlf6kCRmT zV!~}3#h*k9=%nPFNM4nvHPF<+Shvoh1W{c&aP+8}05$lbn+9b=4BM!zLTld>qx(9gJ;Ng#4qUZ>IuTi7KTj08t^D^6YJgOBkF%5Q`#vCq^r0?jaf?n#L%7uGN@mMT0{zeSn7#Dw=fi4vPyu4?2NB?l^%1L zg!sbcv4*)VxjEaj-kZX`x&YaOKmN26U)^8$t~_E7z~L=%wU$T%Kap+NM^K5d z)09mJpwU0BI3L^`1ELl@ja4vuQR6^)J`{sYen{$qCsL=N|Ckzk*eof zjhZft?4z5*Mv1n-gKiks3?$3t+CY;Cg|Idxvl>!gP2dx1@K=v01tjDflT0hZlBsfz z>!4|rx#27+`8st9N}m$*CdJzPX$%>}hfM`4(QW3_I>mAwl}P*HSk~Lj7AV(!Rxv1$1$F>H1*@z{j%V#DupdH^O!Uv8>xM zF(qMIJ^K~jjQMyQv9wJ_y2f=FD`G?lv6 zf7QS2eUN^!OFQJbq_j8T%-yQQ$&iZc=PTa#j;0C0c;OGz33&b04BmcEf+#ZK1}*2z zX&r^OfI8a&m6TQLJog~Hl*r0)^$Rq^;l_rN=5ZxR?&>9a5BGf*w-U#yh5 zrx@}TQH`pY2a5)IUc+myRHwJGO6Gp_<8QSFL5T`r3~h-PjAyrNIr1oyJ&+XeEw z+l4a~MKLwE$xzb9g;FJ$8Zu7LfZUC<-(1S=U4UzUduo5%Qw@(iv3p>OZ_#+1!G_v_ z^eN|aS=_+3X- zk`cU8(CY@|^MEDM?e391LXuHlinw{O+O62qS(Cd7fV6M34$JG<@VZk4*A6YFEFJu; ziP@)jqtf2c;skKQvzYlVmaz*V#ST8US(~CpZLeTVDwoaZUt{sj?s4h;X4oco6EuBE~d{U)E! 
z>5}ui@g@6!`AhdfQPFkCmX-fQ?lruRi)P><|bFc}3d?iUqzCf`igDYPT?zVwD z7BT$zB|x?z(fotlJi4dqJw|Nsz#GOEJJjgEa8z$QN}`GWOwowOj?#YOm+J0+9b|zm zd_yPli4XkBaA%&npm7bH-o1}BdorAb!32BVH6e)nHCA>52=DA)+}Pi`F)lQR4-4U1 z-vnTMoV24u{zTflZav3G6a9e#=><9`lm)N}n1~AwxX#J%T8@kZ5&#aE!9PKCFEYNq ztZWovhh9m5`xvjcF?AU|C?|vi9pxX*j|~r;3^ojm;V%jl160_u`&w+pjha8tPd$f@ zuW*P5#Vm`3#DT=(>&Png>5~A$EOD3tAK$A0LGN-@L!OaZs77L?9{)){Gh-?^T zmh8}-EGd>8EoyVn#_K4P5d+oRsS@BH?=WyMeUsV7&*_+%}2(G!5u$TYq%Th zFWrRoy8r6M=5V&$=bXl||I>tVo=z%Z<;_-6baFfIZQb5FPknq`{mib=vA?aaZ(8xK zeeZ8%Pl<5xo$6_;sP&d~Sif)0FK({v)v@_BaH)1!PR4H2t*|yT&*U_@ZQ`6%Va>LN z3pnbn@xz;9kYb z?oZc!o&NK$Bjp*uO`mPP%<*r`5pA}veA$|dZLj6P$ayzBvmM9Tc5JG)u{!%s25QdE ztQ|Ey_Hgx8-e%qHe-X)f%=kX$@D=fV?%!Oy$jN4MIUI!0K*=(ojtuO<+|@h+>p!Hk{8)3d2K8vfFtLFeQ6bCAS(OqbGCtE=1aIy~Mco8qkV^mWqm{n+YM%cSSi zY&SjS@NsfMRA?thKjJHdyikv-9THNUgk&edGh3Wd-`wa^RC-33HDD} z;_YOckMGGo*{=7?V{bJP(NFyK*LQr)M#mmnZwdHn_ow50haPJz*1TUo8}H-7Y(-&p zYRhd72lK_oI85PZOGShJcYs`MO04J0xdiylGzXhV<~Q#IMTZ#%+gkdI!-`qV?MfUj z=IdFXEVU-9Gb5+t*@!IXosK$Z=02bA@YiFAS)aPYUe1UgTJD$9%(qocRGB*7@3UH% zr?ys|EVo`3(mQ6~BZ23q_avT@jo#H-{rx@5Wv%Sb$zbFS)ar+t9{J}_TAUB-AaI|gg*HcDw&$nOY@O#1 zCg`64_s&VvB`>LA186aDn+xdGS- zbm&hZ9Y79#o!`eg->p#qyy`oZ$rGCDd(RUZM;G~`(#@#smyAW6ox(&65vD5%(%Od3 z0WLd->qJ<}8W!}lQnV@Z_TG+Rr81kwXOWpoR}_}_D3bo0e8p_g5l|81m>6)b<5uPH zihE$aO1dGl7Y=XOWe}1Dl*dV&|1J?z z)UEzmyHR$Qbu_Hj9aS|se1{uWQ?uDoTUS%9#bg((ttT6E!gLP$iIL(>ENrM!TP`FS z9VD1YrSUD05@BIH_|3^dz$S<=NI^;IxdMIc;Wjbk?(& zzZGV&uRBg%X5VKz+)P}awzTCGzW&zqqEx97rzk`+N-H6^Y!pra3$VmT!c`2^Pr-~c zXuyyjV-lt(sa82M$4iP9Mr4;GLW2uRTGKNo^AVE)kLO3?-RD#OO$@LXo?etN&?vT0 z@t`g&Mr4o0mLAj3#Wn>eQs6+vPRWpwBu*u%n%B1;+jC?fAfawL&mT7kVjLuem?d6L zo)R_;`olymb{HC-$WmTjG*GM|GXhj#z`>kIzh6E-#;-~o&^AkOyuUT=pb}9w{l~#w zoV0>`KBL%PpG!fTh?!a|G3m>#Vx1zm1W2hlo|Ar4H;7@H4;ZKATWe00yhkTk}?1D_>1XNIj3njjy0ni)Zn z=$^Rh9-!KTUm+1NTZE7%h!I9IP$KrP$stZcdKUrPH7hS>%&_Pb0`#XmC8lyr(15uh z>pr58Td)k|1IUqsc2YM-1(?uyHGOh!G6ra+fP6FqvPwS{i2Jaj0}?(hxX=(ideKCB zs|S9?1f$r>fH`xH7VFQ_-Xw6Cy}oG%$zSC3lxAeJatSBV6#Gc^sR4|Ol@XRz@H(q< 
z4CxWe;Y{l_Jy;SHm+TRl{_}Kvzpv(%_~grr2nQ@^R1p(DV5eDy^FAE@sDP8X>;YAR zIYC`*ddTE9gcfGP0u%*rssn7qIAUuw8UqdnY8|!IvrUMui_F&8m%D z#ARBRrzE8^7eWggyjEO`UQ5g2rNmqi2t?g<93Ivu0;+ICn%14gIx%0$npQ1ZIcu8D z$^AxRb^Z3J0#QIzu}lsQ>ggP^#hf5Vc@Ipiv1KE&!ct&a>y&dnw0Ml0+PuAjjCYB7 zf>yzhop{dgggCR8Xe3Kz1&4t0(p$Rav#YlD;=ZwL5n|e#1k6Wf2eBS)MXA@CjYY;Jj>g-PsObxy8^TuFBuEkCZ%&~IIvbqf2Y3>>42bHI>)5lj#K>+5)EkUgw+ ztKX_~B%(-s!B?x8+z>tJ>|9KTvxQy4$LwCc0LpZ8a{;nnfKW?%o)WYx&}M;dlfS;( zGZM2BHm>FrZGqsx9vC3rzZ`}?v$^r0q7BHvgJ{Mf-pSmBNy7zH=cK*l@>7wD zTgcuhWb?gUe|3#pf-)kEIAGyQwojk%1Uw;u5$hdIjlVOb-Scn~;VI%Z1FUP2%qK+e zpBTu`Sjfv2^X0VIx6nra9`@onNp%kqH*~*ZTXKXAGp_{>xx})mIRu_L=~u17Q`Ni1 z@>0FaJ)-kCu`oth^S&J_N&|7NdI7d*&%Nu-eddw;EGGJWfc>EVY3;Y#mxr3OhK4GG z8+0W8GdVEmDU(eR{5PvbFv5j!dc#o!UH=T-YKSJ@Jm#b@bK>UO1Q~s+*IV_tA${>HsF$#+8+kWqSfD8{Ymei0JuGa$IX5ThHa#2 z8WrefWQ-I@YbnOO6+JX32fRogsz^R@lS&ZPHR9EOotu3?*1KpQYaLdG=3Y~h!1Vlz zYrup8bd4B+H1MxmI}vb3uu>4?Nl*x@Fp2St)FKcV)g&`756vUN-IsLZqk$$pFD^H& zg}13ZNzh^NfWrkXD%=8wCJzWS8Sg7Pl+KRk3^Nzr!PKheQM0FO|I?pp`@@g^dgZRE zJ!k?fdf%VXr&I=(#z*|Arx{SeN$UgvO%&r4&ebTaJ^`t%Q;NQIw}7` zZXce;WA^Jm;$EP|*ZsFt!IPQ%8i6H=raAc*DCVthl zN_=5SAmq#Yhd3ZY{U!yg2LamL8CmS#DWfL$cS^cZoAW@=SPRw|^|t0Qz;u-c`vagXf~8dk^T|>U z=ZDMEnk!oo@ZJMjS<0VxXuY-b?A$Qa^|biahW^mOY!`3TICemV0rh&U`_ud!!=z|q zg1t(HzOW7Z0awGOwDN!nXh0|lYw3{@*pH9lJh6I(ho?xr(Bg9=)MULV{IG+E*xA8) z0L}4B5wAMJ-VWbv2|NulFbPJzs%>fF2mQZs%D@!}lRTZ+m=Q3kY?s!kNF8Y9B4|z# zvc1V;41Li@LGO5H7m3cj*PT6_&OK+{v9oJ+OY3zD>&gZ@Z5Xpbs!*eU<(^!E%OGe~ z5$TxlFkOlbG4#51WxTV(@l4t?vx3FY=2ww;Y{+-tXH^o2bBA!(UiMDHTwiNc-#lqs z3S2sr9c)QHmEs~Iwp5yW3SS#G=@v<_cIbH@Ud9=;9>=3@qt~I6+VcFYz77JH@itt2 zn~#sBf~3;8`!3xFY7Zy#==|)Z=992G;JBC7@W#$7xm{)_U0&UGJx#_de8hHIu=v(W zacj8Q&pn@7$w)WjZlK0-W@E=%=(c}_k4w5gAt_73*{V^e_H-ExK-9Tw!86U(c^x3 zzBI%2-LEst;NW3q?5&~eeflY!?0F@hUiMnCZh5}ywSPQ+xmDZxR32JGx#-~vJ6j3*lJw83k+;@IO6?Yo~3p>n`;EdRtJMRmp*@_Ix<~ zHfjU)MWE(YiFY?yZR+a9cq3izY_{0Z+PNF=&YYYX1$`xv0SD{j>UwI8fLVF{u|G23 zCZgTk`2P0t{Zd|E%Hiw!Iz{6;O!-Uai^8M&ljaqSp)pIPqRY 
z<9F5Qs@qdP%-(o%zRKrwV@C(ATmQY{v3Jp%q}A8`Jk2oFMYeqjhn@E|jyAAOk{~iV zJJXJrp6wgOgU&_<+xLB$G~3-BKhy%@28LTv@>IA)&MKv(5oP2j4D7Co}*VR6-gKW6_7WLATGLP5#S%F-vO&PS5jmAul?MwZ&xw zOhBz>ooQ{ScsL1xB()!iRa&EYR~jf{>C9Xn=P#IsBqikUZJc~GH++NvBo0P_bb>m0 z=ATIeN-h+r$)JRE?3KqMh%0_WTbF3-P8O;@=#!UZaG4i6a){a)($#i35W!xD| z0QDJ=ra({utvMWLD-z=5WXwdd@{djzNc(E6iGN-1DCC)`(6Z1pP`3KCEa!V1A1go_oLzH%J zB6e>2@HK8<^R6ubby-`S=MHgv!d=~B#n>cJ(;R6}=iK_fdi9N3_gqEtifHiUh{I@e z?x~ETgfNLH?C8oni8i)O1*=ha><1p2I4Zr*nKv9UtGI?n*^+8a#;`17t7eh2#hLYN zskdw2=MmJg?lZ@kO4SaX;}X!8yDv*mQ*dakS~ysiSa;O-Z(n=PxRZ&_w$!V?WCtoux~E82oOw>m)~HsrL>oV@YZ6?=nrN40!y(kH zbZ(VM1{koM<|IT2TD7QFEU>U;S4FEeal~;PkEhAV&BStP za$KU2U1~M@k+Pi}xnAG{GB6HPDNTDC`9{x?YCDnUUUPOC1jCU&^?%f^18wau$`LNyAMkuFn-w95ds%AE0yafd~9{~r-J z7SLWQ7L`M5W4Rh1)C%?7>MQ3BfeoQ)m_ZW);rnthPym)bfDhSZY(9C^L^iCQwAY`$ z%(|FoLJ!YR&)>U_?M>dYtu%bJPokrhXWFah?S@u5d_T!siFpRM*7ny&1vKp8-n0DM zDA?ZkrOus_94TZHWz=8=o@2N~?YCWqs zz*QbVIz0lPAQRlm5`o!;!arzD{|)2t$pRFc1*tqwd)NYV_5 z(runASdbPCvLo`>Q0bIeoON@{1FSk})X0jt2^J%X1Vu#1tu9`8o5L?Ilbbo8WJ?Af z=`I(cnkh^_PPZb`JabF$N+LWZ)1BQh47G3<7xy>?xp5gQ%a08OG>MvI2F$sB z@{Q1m$4v1Tttz!`>#fa##1RNE6lIq~o|GHNQX+;8$Wa=MHkgW9Q-d${wv-j}NcgPy zW$?nKBoSaEGF;VaLY;5Qg6ezbWegVmxL)TxgPeo}O49Ig@eD9FEJ-2Lz>GZR@rkXy z%gqZL6U@c_CC9uSVrxGef;zgJJr##IZc(Tg;2S~RWdg%2Yj0jp^sx@?5; z_4#A*BpTA9`MQW;`$2dBW)${bC@Fadr^9C7h8V?~taK;x^vUXjb4idx*g)`x zG)>muYxaZx{JK(ZNU0uT7qIFBCe$fIn1hmrBl?3+y6VxFZ>xa1lkvJ=%Ji|9lzcTZ z^Fy4sS7?(19v(uxk;h_f~8R^ z^)>89qS+M>Ok=+E#>p(32L|w5PO$wpvK_6F*^QaX+)!Wt)CF=nBO^S9Fj@C3>Ord{ zj6P%OGQTO+wfc0m#r#K9t!E@NJ=>0=)jZSynp801R@O}KiYc4@*T6TbdnlvBQ#cZU6a5Wxq-z05vrfbLHcAtA z38L6vt(ghV*b=omVBh;9eV+0p4DS=Z4S)u7yXauwqJnKU6D8&w7udnxSOCJ$Nm>`< z>kXji?;r^Qc;O4F7R8;@KCl5S(m4L*dLhxwLI{jshSQ(pT37^UDvHNh#?%xoRYo*H zhD{T*Bxm^(Y;7>Gk|&e41m?9B;A8knJg_q{52OM|rT&v#&T##EGxC>;bC2|$sPyz%Z7mo|O zcP1sfk)IgyGP6oO^2ccK$Yp50xfd>asTU&ce9)C>c|^1g8`kGk=40ycp=N6bjDJ2S z$@g2Q!=+SwCHz5eHK7C5gQEv+9+|v`Sxa?l9;7O`<+p6Dq-Q1#GAGBnf`*ak(k{cU 
zl9!E3(oIUBtA334a!)vgG;0*!QTV5cjKVH#^ekfu)YKn{8b2+yw1paVgc@z~5Y#HY zyLJoSG};|V66?yuv_}@Yg<4PMX4eIJ z;+2IkiAs0#J!`I?J{7_R9I&4Y1ANr+grzs_j7#v{!m{G|3yKzAjFEDyuRmAUjmZUc~wHl(XGeI~< z7SL-HM2-O*igJm@HE-jpNvbK+D0U@X$}x(b=wg;d%Rq@QqF;^Ui}NQu`Jm7||LQ|E z1#F#$cJz|P6&ndr1u$NsDS2KwEOo%uQvEokb>F~Z!~{`hnB>4dVDbolLTxyYr*&-Y zCF&YiVPuP+W<7q>$PwytZ2|BO6K|?@5gwt?mxrgkXj6|C7)_aLyq7I-mDX6GW?I;q z>>K*d%ZbrkTzJ={Ko)6?_Nj7Jr|;x}ma?-d96!eszXfY0)0~L`b3RxNZdl^aaNxO` zu+heoSAbsrJE3TR5E0^S?7mrpz%gcRj_~!9g>1kHF1xK-UpvN#d!>VDMj?htrB|J6 z(ppUp>5Dm=(Key2;`BVZq382x7-#q zE%INb?V&x#s27)e7yEXV<2K|YiQzt7^e=A6ep?#oJdjPO9NQ4tHk3;DE*v{AgBGrj zk!y52TQ+H*wc%hRgTaUr!*t4;HESF7!3&yKtac}?iLu_%3dv(P*QihRzBM?ZYdh?$ z7b4TwPX??Z_vGf0pS9BTwYc*eSBEs$uy(OyH$W=L>f#J>N}-72T;P?tQUbh7EJMvr zvK!TR=eRXH;jZ`x2)4{BCv>3m7=Ft zMhmS13cxBNdM(2pjMtfhapUC9GwPH2I1EW9zp-n{&s{9=r*MF`yzMvp@3sHld4~y6IKH|?1K;lpiV5(I z2m01;lMWZ7kK-$S5MvDwTJcAbI>^8l9}T_3#zc=izMFzMGPKpkcK#eWY=}`eB`mEM zmS(fQXCHYu3_rGuxHShKF4|}taZ~xh*=9X2B1>j1w|HM9nl6o7i89>p*Y=^te#k*1 zq7zNBN5zQIp_Y4Db_;?MJH5sIY;?nW#5y}f8&UR+_Z)iFZ~WZvCOhHlaK8WcbwGFZ ztelsXOUTI;?B~UYwSbExQ}GRt%oADuRat5$JI~7GRfT=sD|~@>DRxI9S#k8fc_`uc zn8>3h0+ChVm(R8)#3>Xo(}L>nXk`~nlw`8Z zUufzS1`QX~O^sY&!_$MDvOwMg$T*RisCYb1GUtoKxT_Iv=Vg=ID1_?d@ghKMo>W5s z6dU{U1A4?0J{mlj6}rpl+~YAEox}B~$!h})WH9ZFv)D!Ao6Ci#GY=b%+DJMexvpog zOV|*4 z`XV^;#5B0Cln48MTeg+OqM(|dE$qfQL#q?mSj8-RU5y6G@OQkHprRkPN?xrCT4KU~ z9tCrNMhW=%;#%oQbuZJ}IFO<6Eq-9DI4tsn-9KbpCEAg&oNZlK;Se?;a6-U{8XSYv z>XvGCPdjOBGacTf^pnC<_TJBx5>rGS;l+4`L4UPbe|sqs{eYZw1;%x>>mQ04(^eim zAZvWTkA91o@T-QS(z5vnHPT0Zl^_!JCB)j@5df9e0NdcU0u(;}dvM^w_>TrAr}Ex) z^pH1*E2f{#my!b_TlqB$5s3eWaV$vIQb0rb>jcmTx2M;S?e3$_m!#?yK>r)7pAY11H$(zSB3LvnKJS+2XYb%QfC8O) z2l%F{|2xk29{1pchtHOJSf=`?CaR)*1l1ws7csm}AnTPaT<>$+DCRAtAZ5cDljwr! 
z=riVUw+_G-Q2MwpHcBt?4$pt^GXHKL>>fd=3tc}ta%erq=qzkNAHpwGQI!9p=wEdU z;)@KT8&Kmnsxw2t?K7MR3al&6;fpl(*K0?&5Ackp{>wP%?V4K3etmiDAxX5t4-2iY zAgXU@q|M=e5$51k&-s3FG^#HyYB!*a@o2eq5s#Y=yY66|wxG|$aQ+v~=Fbo6M&X;^ z=aa#;I#k1w{w~7a{EqpW7*IBex>QitWW6lRUbEf~fZn|Qs^yv(FowC`D5M)?8MqaU zD$|Nu{R-F{tE!%V6D$?#7Ri@;&=;0$JX5d>?aB#Q_9HL{$%@ASZvYGY9&tk}h?k}r zxOapsXS|utkj{o4{2t;=$iR_AcMP^PtFXpv-d_7Id~OR=RSbk-?u zjVhZoea-&x%q)B4zIEDp%zgd*x#j!o%@>GI4q+YazK03XwLMz~wUp&m40|K(Y2;r!|$lu71 zI1suWo&feCj7VJIREYazvOmlNe^9dgQ*Do#9hkSf4$i)sPFNkV6U`no+lu^117Nm) z3zz`p%PGL9j|&Kmn--$C9|xhkz}L$iY{dqjuGRxJ5bNv&V#c0y(f`okPe=VP8Hm+f z416`dE22HNmAH4;DP;X9E|2;l1FC-*>;1J|0IY+%d`}9Z_pKBBs(`tF583eQ{(02x zFUVOOKKQ^MNk+jr`I(NM(}L|zmB=E2`mXTYFek|h-0ma7+^ql(h|QiOZRUkQAMmZf zzb)6k7dP~WO*^>YV#x(S=0(37PFmoi(&lsM+!xwg9f7$4A5Z6B6dCY>8!8AR`m)QJ zArDx8UyPZ^!trhR&=bu$IzoEB&s{(D$P>)D*`W+Y@nFq)&Y7v*?uwMAh1Vf4v%2Q~l_jGkt?W5{q^xUGI?kx<8yuFaPWAZ5PB zjN>lnG!~50OthHtROC-o?!%QhI_~k>5>L2Gdu~s=g+%h61nZ%y^M~S`X*EiwrxwFO_r(h7sOpV#K*x-m>P8lA zUlqtz?X(uo<(lTl>5sOV1E^A#&&-`Zr)Uha4i-+1*ZclXqVBaKorQtstHCOzzs;A5 zZOd%j78q7jG#yJ<1uR~p!UCJ?Iz!*lEJ$L(E;uwTDxPk&6q?t093bm%)Vst_xppRd znr_QRV@p>ehUPW{k*7IzT%5a);;LfWTbL@_Ev0`eR6?5M;QX*VcsH0d*>4;&YBY%1 zY-L-^ELs42&Wr&L_fdBBRlIoZ7@hix-BLROcDF@DSv)A_`!q`IU6USx%f?T7Hw>p+ zB()v`ZIRBkwvn*x^lZ9W9r(K>TTH%drlLjg6q&}Z0Xr!mh?mBsPSa5&Ur+NR; zp5TqG*Srlnj^Br3s@|`VRw5}-Qbch5#At60D>6gdT&riLaxwGP-1#4V#>Z*fP%)0r zDOZQlub;S7L0hFS{Zf&4!rgo}*L9T`UJUH|fxZzN?_-oOd^UdukSpg&uCZwUQ5no& zD%-f<^acdoS58cuv^eKj32lZ+~ic%U= zXXS5^E_wud@XB8So_Bpnh7UZc{0}=N`-6J8_;;h?L1(uk;{R~N5P))HA*={d@94pB zLlR(cOR68@hWF(}45#%{>g3DswH%M4W21mmukL6t;pyswH$IX z5p!{7UK0uI>P?{W!F*yYu!o3KCKw4wiJ!$YW+;!QQ5hM5HX2*cB~zjZCP+)qjoBxF zP$4Iz>xUB~Fi8FnVebHB39{@DPusR_+qSJ~+qP}nc2B#fZBN^_r)}%&-TUtSV)wnf z{}YiX;zU%O%F6mxW>!{JCIJO;nq+uf6taYPK4BVsYqDVSs z6cx!y3=0j5t8xapK%G$%=^d~4H-Z@?GzbzfEMVwV4Sp)6F+x$AUR}Tec^rzwXiz11 zd7RP6NSb&`6g1i|bq9?Bv>`Q0VFm{*MO36te68x9t@zKs71yctj_|94{q_y{>-Yon z%Pjq`yc=!~#wLGWw(@3&vRgZJ|K=MPgf9RJGJ3OCE5K(Qkw>9XfUSvd9Fd@#Af#cx 
z3kH&a4-rmi?w3?NbiRI@;nqnLOKP3PYU5>}ai2xvy!O*|qn@jI;^b%fnv=OhbEMLm zb^RvpdFw67#=>ONB^Uvu?CkNy-shfY_s3-@J-wG}96l$;uKn^4}RW3vMj(&!#1- ztt4_4DJQMU<0x8`JM)ko%``8YOgg_yPqy!?Mo8*EC{7qRbmqCy=dS$pCFf8I=Tw?ONz|JbWeJ7|DeTN1hG4|_sfED2M~_)5nQkEtXamQC|bfToRfK_hI%D@$?IT3u6ef@|LP9SfS%f*<3JM%A~Krb1}o(QFkUr>n1Q0BA`XT&?-vgHWd!OSjti7DH_%7HK|az=$TI>DKJE)XujDpvKB+xrAU=Sm<5%P)?8{{|AZLMleNBGf8Ra2d(u3C7gwpDDPxsgjwcN-OayY9LS*4v`fc znus8B!Y7yH*il|#dulo&+(eLSZ4m{I!Lc@QcluYLnUFO^$ZX#LqLvjt896a zH`^^vXy>)mjF1yENx8515AMg+kfswEMGR}9)%l?Xb=q9qXP{tTuymlXr-!-YkRSGM zdU(~t>TJe66jqt}W>&f~R52(f6qbqXN~DvT$r6gW5IT6l#~X`55kc;UKTsNYovZjX zVyE`FVw|Ix!99PK=62X$A&u$pvSTC}CH9RVT1VWN{axfPT!0!tM)z$?)W2hy7#v=k zW@%Z;{~=^cxYKsg0YxNh`GYh`cpzRLu)P9TNj9#NyS5qE74c4x?&W7<;Tn0+ci3sq(al>FPDf1ORl7v@*iH0fxMV)#^k^Hk&6A5vTbx>8%j2i{xciY>eS%+&w zP4Rbq5V>dHnpdSg(9L-1ztQ@uG&Bh3h60YyK6eCcwKz@Vc#RJet>eHCDz5YzQb5kg zi-#AX`NL`Uh7QVLSL?}!R2PDHU<`Bti?8|p{F$vCl6Ec=gwN#N;t|>V3AFvmcm;vj zg#Bc+t&3oyCWxcD%W0QZi^%`Y063|#4+pRnXiC%qMHlv>vJ5Y3ogJNb(4JpiRfrSB z1M(FA!VRlEq{y2wHZQ@E z@K%exjsXFlxKq@C?YZe?S;iVI9l(VK$#jV!@XW)6tKA>NfQ^xB#U()}(o@nYj6nz4 z5FC?>HwNDx_t$Y-Z2YKW(R~*6fLmxVFGJ|29oS1e$K4RuwCU0GxoaNXcs`pwSy9>) zLPR)0=5eH>5g#!k*-xVJmwag-;<|ySb#uf*aqvOGMT8v!jO(TAXLSjoDgJUG{0^|7 zV$y&>r{I1oaHpU%G1^~Do@*-;giO%Z`Z?u#=kZ9j&3V@u+IuFlLlX5-8?}jo^aa0V zYuN#vF9U|wW$QGz*`dIPU^|OrAsaUII@fGEGfgi*e}bDSghdF0%M*|Su!zuLKh0Wy zJowEjfCIf<0>JQyTO=GRZU-aFo49TV)>Do@01m4XPpmDEzS=Q`6L7{S& z=O0hQF6D5XoYky*b@eayG)mZ9Bp!JR+3oDv-Z_BJQb9ktJE6w{ct`MrM&N~12PaU3 zBA|45=dmi{o6QvmA^->qf|r38W$w)eb_r~Sn$KwMGr3VhJYfu&1&_IXo#niB3d zuF^t&68D{Egz5du;2OQi)D(#zIWSPAl zEbG@uc^$wViHyWx+(-yXC~ckJo*1`Fwfgwtoe*+j0-#9H9(g5|?-bquxtUc$lD*KH zP64pLf%|>cqOvRoz@)|mnl{@rZ&HPZszmA5^{jL~L;be7YS^)qU=5^hv_{utbnO!! 
ztP5JZUg8)$aUqE{MF|yZUDae&1)y%Z!!X4;=8Js6u7wPD;ZX8r0DY1%v?S819{v-yrX1>omt%`K6+Q3S(hvP1Tbl;#bYA7Rgs zsbx`^?Ar%k+1hTjhG*Z61o5FIFIt0P-uHoeU$AXZc8ie|YK}e}-KOBOcRChRq&bLC zpIwp2DgE1UV%Ng2YwJHq#VV7Xysi&Q^(HjDz-V+Ojuy6dS*(v`;2SrI*K?oW)bu4Z z-q>Af#{)rYK24fcdpqB8%hGb(Pj2@3=sF49={gIwy$<~6s#9*7ou6-d)_V1BL;Wog z9|wMHN%$NWlcjzh#f(kaxb6B*pX1?uZaj}2HOK|)J})l3=6;;x=_-A^NMDJ;MYG7l z`Mk=%(0$nN1*4pnx?apcUN%Z+`?|bnT)uy~{13a+nCBq(ky2C87q_UHTR5aSA6s}A zZzhjb(SakZOS!a-oxTVCzj&>G-yYGuU!0?u;x4vKW!u2=c;3E!#gf~l%DdEGKD66o zs?$3wJ9!gY9%qJgpGT6Z+HrX~GVUASyKI-_@VFOm0yRVN+BOU8k$44)>d^6wAYHYet zj@^#8(O|nfeN2WkK6!8N$6ZIzM)n3>ajSVh*H_Z-FMil&)5>Vod|a8g9WlYMtp+Z@ruh^6@wxEBSX@f8gHht_Jve zp0_&PeZ4XBVs?K$z?z-fbUPZQuRG&0Kg2D`y#Hp>d9cp75kv`^D)=p+!Y{7(_{DBqk3ltjNW_#3{aH%o14#B-Bkif z?4#SmS*_dEN|d@M_pMQ+X>jq9{$SU_>Rx)=;A%4wBLA?CjL#6`=#{V?aHj( z5Bk8_1nRe%nzLoHG88Y=CU(gc$x$V8h{zw6Q4B?_5-p7eZSoUk7zQ7EMc%n$Jz(gh zZN6q-Wd(ju=PVP~=`|pqB{f;Wk_7T-_tKijgyWvd5r;m&Ldzk-a8wamUHm>tBZG~N zQ5QbB&@*-FyYu+I0AuHwVCUlElLu?gbr@+oOgVJ#Vns3>Rx-U#QGC@iMe z4G`d8Sz!wJ;qPXP8vQwNuihN?DIbli@tqxbXgMq+S~r; z^OXngo1KHX3*Eo#hyCv+U$tic)#Tr4iyZ&c82r!w>khWC5C;7AE#)hr9LE1{Y@qLG zZfIj}LTlKmqHUW=kL-P-(8N{5ctlE zB+)Cbl289+?}@evT4&zj)jc@o zKVFB_(Q3r^qX{e#GXc+=#g)>~MKgSt@8B0tp7)Y-5>5C%wh~8mG;hbgA6j#%W~$61 z#cs(s;n7yAQm=KY0KEeJ8>jz*9@4P~cqYKNZsl>dL6{s(%-4pvo4QgZ$D z&|Q~m!ijN;6oz&<0TAK1aYI7fvfv`-dyGOUQOpPV7N6PNi`G3ZfVvs%50B3c8@=Pd zL;v}`J>@g zVlqk21b_gdXH-q#@@9?87blNag~*?Z+6*s4{$!RVs`6PdidX zd#}i2#}TxKTzaxcx0X94YkDcrTuN#XQ=lx`fDE&6-(Oc|QYwG_R8nwzzuw0hXFb9S z8F@E4cD*nj#yH+*M06dy-1DH`v+;fF_h6vt_P4Mc-^AtkXa`Aq+q?v`mv4XRmhZN; zs)1)o;ij*=Z#oP{ptIRA#lni`g3N~@ z0sj*h{2Rs$ogAIbos4O}Ahw}yZM((_|8b#f4-Vf0u-|cJ*kofi=K?<6DwN;@3FzM> z%YYC*FHRYGbkcnplVnBVa%NPgr1xSFv5Rwg&>AXsdD*Xi)T&x1blQ_LOHj1hV>N`- zDqB&pgf(rOuHVgqq3grd{o-V|j3v%8ISRZd9rNNzV=Q(bT3E-Gm1Y&Y_yBs>8CL{> z2WhQET_08Y_z0%yCJy68yRa-#B^yZf4ut*&c|^Hz7vFsgzuDVJ1y#Sh9dh)oOZ-N= z)Gl5VQQl`ZN9tu zw3Zj1=#pX*+y(F{}kcwA~;0x&(3E_^X5=6;3n}r%;ga!gcGQWVT zTXKpM)~aS@^Uo?UDX#QGjq{p*I6(Ab#lI?R!=~!5RZNgwbqSi$tU30wl|$+i3(BDO 
z5k_L@I4 zr(_Z{o(a5%a|e6Zkn%=W!<8=oMg7qsk(K)q#r;BOpk>1_V|xbI@O%FNQ=X(o37$=G zGHEK+{IyLYMB2*uK~If%I@$iP;Dri{&G5*?Y93fiLYZ21ieKj$|)b5k!0LFod*ClSkSyuImu8=kQw=Sy)yAHiSBR6q=73(W<= ztsDrvRJ83*jaCU1a~T$w6C9ka=}@u7OiO2geVz8F2ZD`^6-NaY%1IAb zMec|=`B0(?&4Q}mH8|8xDL~(vV*l9m6ZNVa%wfxUbK;BSwTs&tzaSTg5E~L7Ae19} zedL*bcPrajN*3$7Y#aLu1C@0r$amic57^_kT+e*6_=l#mN5!6+BfgxYPnir(H<)o( z{-8BK_V!2apc=#JD#s(J?E$I1McwDB)6b|+%WWXSwj2@d5hk1Csp!aYF+n>@cy@Y|F=5*zqLE91P$%T zgxt&@>K%Btn4?^>v98fWi$8olAZs8&IZxU_!rt6OqC&?)AqFuHYPLWZW==yB3lIAQ zV;3?7!~*zlx*hFqKilPtiOny<|KH3wntvtgG}gDXqy2*%9Sp5wdLe`8p@lEsBeprH z&mMo(x3G#Y+tA2dBfhy>YRYRlLNL*d3x~pE#|rJP5S5vXxP0YtwEZ@p5EU;}tlj*Y z7kRe3qFfYCqCgxu-Cf}di9#*0^!#;#a}^?9AH(;VC-Ql?UN~HO*&$^=xSv`e-Q}AG z^H-mz$Xz5v>xy!Rs9~J%k2GjXXDCH&Lf+8amlKBHf zMqU=+vl|=jAhbC51Rxb79q66NCD;{{wB_zs-EzyFP1F7Tr&KBU`_WA zaef)hxru*S>$2)Uxhk)U2Aj?B(9&v?D8~plPJb>I!Q_w68s8sC7=PoAi>jyyAQBWN z;9wvlqhR1@Pm^M6EWqzB6DDeFA+9SE-VztfZz1mH-=asY7)x>RTXUdHp8|1d7xkhK zgDj^OUr_L?#fhAaWI=H{Z$b9^hVXr`7(`W%=9(kjeFYOgH-H<4HIEpjnt)F;7V2*! ztBJ`aLi-xk*w;SiA4lfs{O8E36cwcU>EV5*YFbhSAj?m2y%3ZKAgH8~=SvkNQ%Y#5 zWh66Ti}$-doi|SUH_rrG;@aufAHC7kn#>xDw1-c>qY&)NXo{%?4QRIRff;LxU5m5@ zT2!7wHDGXeln|%rHah3nU`}OEH8Q@sC&0kingWAY-={=+GZ7{!_;j(~R~HSLl(pEQ zN(v(@nq4G41J^0F+0FXV=wk;A1XCx+-`OWur!pH~eQ1w`38dmqu_FY^prYf~?}}nh zZF5Q_5Ph>WM-3sc`NRvPmv0hvGnQdm=r-CHA_}_%*Nbvkf1_|+E!c#<$kk23J1w>A zx)+p|x-S@1Vz^)n+8rQk$`+yGk`_H|A>=Umnb1oJ(Tys^Z^QRjB;ULe!&rO?74EB& z&R3!8zwa{twfc-*jBT9$088Wlh;*w4{1<-U;)xfMGUyL>dD4ikCdk*GKQA%L^#A2xCCIoPN-vol0_#qwFgnRHuHGl51?@YYMCjr*`tU zM|h=p--N&Oa)mI6peWP7;oO%-k5Yy% zbg|R6hSO5jyCk@r7k=0i<_l&;IC3a8wg7bQCSaC{APrK74vrJF-6%n^5KA&y4O`*! 
zyU*kZh0r^CoHx`7;Ewxyl6Tw-j@@gk)wjyKI03YIV#00V8rIYs5WLm412IqE-T|vL zx=bPDah4fFHPQ17hmy@H+Ek|iaNf%PitIZnZ$n62)&}r!e&!qsrTliJD6O;26nF=I z4QP2CY!e59i|%!m_G<+*G5-ByquHtbR8olqNciO5!hDSKVc3Xz*^}~-xQTZYQ^t44 z)~b%k+*d_$=H%--+h1qtflrxG{A+eJzli9cXX(Fa92%(!s*(TEISqgH$rb$(|I|6^ zigbGX74+oq2~pMv$=aj7;4<|E7wrGWh>4^1pI}kaw(R=~H9V^A z8^h->ct8CNh3SRtV-0206&767VM*H*#}bMEcqCa}QEl&B2K}C~?kG3a$*}~ZQX9_? z7wV23fiIC!74ixr)rDe#Gy{Uq1mKC@qlG_R$Clwn6xz`R=c#L(6V7~b5$1VYPrWe( zaBMrhBuH@WVu^CxLii8@CC1Ac%@WI;2&i`cs!@D1XR@Fhz@NVc2g^`wruR$8j1SdC z4pzefnwWkW(4xUMZYvpdm*>U&r=uuT7FA{?0VD`!n_x)|pk9|hZQe$ClXGz9rA8^l ztA@sDy;P~pZWp$tWX&lD8khKiqxrbEgS$ght56Bk>-=ddCS5B;dX%_zrn5e#<2mU|I*T0}7 z{|A(2RKB1DeDYQ8Kne)>D<}82f9)}2cp(WxzJ9@d!3qDrF=1tH@E^M`I|14E6+C#P z=$jO|Lg0Qv-p#Mp2R2ko;EbzxZ9a{VHK8TgvaOGdFLwTyDA=pmI&-cZE1z z@MS=M<0us`pPA!}8-ah4q_wTlUnL33$f#st$#UP);-o|cc@rtgT;NExHz#?Ua1Uc3 z&?lc0QW8*&lL`pGZ+iN_gz&+-_MrOJUBW*J;g2r=ix4K{{;g2|s}uM4RWMLY)J5Fb zQiNUtHy~gpoB(72_BT0DX&*^&eKGV$gqZM){ub%@mp%SsO4rHQ(aG`ulE$8tVx&~W zmrR_PZWA0Ol@#|J855b46s2w%rS9?tU-WWxM87<9c>=V*b$!_eUwi-MTg86`T8RH! 
z*EY6a*{;6OOzUXqU~cDRt!!yK#}EJJ(`{fct;_8497U~Q2AsS~EC~n*AW8d>k!7)^ z;d&HOcGunD>4FbZK-n4UYU1Lu+uPt&<#fU-2%V=JFhkFl{BsoG-F#1=&xx`71Gu#u z>v*9}&cC%Xs6kZ+)k-(Ik}6P=fK(0!SQUi#NQ&~HmQccBbCaKaVsE!~4>18%mV zd*SmzQZ*OcTqp0iYO6qx{S?s9RB{7aD?- z*CtsX5D03D2!oh)V)JJWm)4Vjthfsr5P7^GRP&xUJ8=VSC2aAy*H`a9O>T1(+ z5sz_wNka*CBDLQ$i;{-J4KB#{Fpr%u$e|8qgPlKf&dICFkr*;)4#(J*_DBRwuO)Q& z-|Virc|3y(iOiZnpi4`~8Cf+mwg?i9v$Lr}9N@KhexZoa$#aiPa}X#bQ4Gs2z@(SQ zhln<~=2VC~LHJmrH`2id7AVMbb#N<7$*-FedKlI}?ObJet%T|~ z*4Vc+ANVSSph+#(kZF}XXY|3kTD;*z4lY5^spXubAU5#S{=fmha^Upx_`D4*4r~qy z`8eI1=oZ^Q88&ksY873NI=8aF@pR*oR7Jmq?nCefEJlmd&dl-p!Xh)bm+a>0nJi)t zmid4XE&Q26Q1)I=wr<-V@NP3jEG+IOoktIS{kjy`a!JDzMk#V|0Og~}6QTQ8J0>oF z$F2Ak`zHLVL&@-u%f{Ts(MjLRO84JMw6Jl3>$kmJMEFL-R=A{%Yg z(yHScfCaXrvB8@iTOpK)2Z$9j1>7}Xl8@A9x60^9_9TdSyFPAs56{8XBxGpSCvXaQ z!-=;hF2d<_`+yi)mkAg5;gwtCxahy_p;Id9LH(o_r5)P+anCo-Mqq-c|luh_~GuP z^8Do5oUZ^qTV%OMxdWFO1*?yG3wMi*T1;#<7trAnV_uGf3<_a#3nFh_WsfU90*OtD z9K;N9$TS`=-hvY+CbbPS@3JTCkN=f@4Eqy&QIz5~eOB+y1+cM+a0^u48Y3a~1~(tE z4OQ|S(1g=}eOVh11@f5wF@y+G+wWqJs@0)SqDb!^$&+-%`E!FE`80C1^VMmt`r;y1F@7Ie`u zIUbmU(JO%~LsxLeVoeULa*niSPFECa(x+*Duav(6xSUWZh3tHr`RV8x}U}cXbqL zKbEkyR}W*QoKuf7+uoB76B;+ygU_1rPHQ@4fLS0>e{b@6f=}hHW6qrEQamTOHsRkK zh{1g1#zkQo?-&VUQ<=X$iIR=c6`EkkY&v0xIwZ(l5UZZR1WZz~44QI;h57k@VALz5#YgGyJM%6dC0fs|1 zyD6V}?%B6-THPD|-h96>IF}*Q*TJRKpmR@x^gb5XZ5fie*Hg}>IgboN6F@4EDNC+u z@w8T3XkVb=mX|b^j*?-O*h6THPA8fhoy>M-^!{@Ae7z*9Gb| zgmoMAwO`@< zjYL!MpTdJ##)|mIUOlp(4;%3bBxCiNjo!aQu6yxjH!gYIhxeMdu<4?Cb0FQv4_NFH z%@tM(#bWjDHo#FSk09){8QqLwN@2IYTVO%A`@S_jg^>g?i7;-Wbofu{rhq} zzG{=+fuv!*ZR-pi(FG9@hX+(CmQ7pgsA}~WHvmBv3y6=gj8*c(*aT$@P*k_>(X%|J zP8;?HLF=ASA@%+aCF&_Yw1XUCv{urgxuK8PFidjwv6LXFWrBs)SifBWVN3aD;1&zl z>K%!>NhDmXG88Ql&UgpBCjl*W=Se_3h0qLOas$iQs7{}bC2Wf7K!T?kmvz*`PghJ# z7k3^*Vi+Psgr>ZMJBtanVwFRo5%vQ=M6F;yaj4RSV-s+TSll$F7$%(hq=4D7_>iIK z(L2ZlB3UmBbv(pB#d7SkW+_K9xMw8j317v$_>C%%D zbh}#%f#zk)xMu%s7mkIGGE$?v*9b>g-#Of<3Os0i@uIrvXuAyRUK0tSJ|0$4J;VCd zurY)Z=_qTfTEd#sWaEGV3lc|41@Z>IyNQw@>{?ZmF}!`9SG+QLnUv#}%w_U|6tq+E 
z{(`L2dSGYmL19dWot)6~VjHp>NqX#S6~HX2mRAk1S}FqH;72>T{E* z*rR?5`usEr(~ErkJkJORZE#(=3I)nNRke_|vBc15 zJyH=ma*8l_oOwH@-eM;OeekxVg8onx75YWK3}k&m(~R~x=^oJF3w#hx zpu*MCtlqUYEhTk3fC%rhDP3n#!IH|1x(ZK^1U)XZuDE}@KfPD!(+5U%4$?IAiF7Vi zQ3waIX~jGmybw89k+Q}%q=~k61U3bIfyovQjH%JV>7Z=_*obvc(Z|YK757iL-q145lQ?=`8Imo zn|v3hTnF95b=~$YFS?0$CnQ>b zcsQ9g_*(aRg}%%4@rR2R73N}xUE}M4{pz%pDZAeF-uFd!5kWHJf$g(k8mfHv*OS5V zkmQe) zVDGmW5-N^r9qmQNQ>JIsGLe)46+_GJ$XB+Q{{mL@6wf8FIDZ>*(GgkDG`rX@(kRvp zoam?hrhQJV8E3!(^Knd(YUAV)J51@i&kdrg@>|9gRBoc4e874ca62u2mfL#H1l5Z4 z*PQi|;ZvV|U_{D}Lc~vo#MosagYa4+Cd}G#FP@V0^x228l@O4!FPx4xsi6Tavp~(t zS{RH!7hc?p_H*at(Toyf@D{O|51v8m^0@;7L^%whSupMW;3#kkw}^7Io{$s#pk_9h zA(liXX`P;|e@UUd)-Cs3EUJy1r2q#09*a^J=0T zdXPw(GelGe@3l{Qm!RR^FM4*v8p`^4#)7iFaQ}@9nJy<^-d+}+^L zZFFnP4+b%xh#>3GdDn{WFiTqdr+d=CS$dR&W3INOo+hxq*+J@np8~ZQ{9P~eH z2)LAH7ZuBbZ3i(jZ7mo{GsRuany1`wpuevzhrL5DKUo(4Qlf>{KjO{M zqmRi$1(_41!exLJUH5xLuxx0Pr4tk!sAMX*@ZPOwRfGN&7Hs`vx^UYYZ+B_Qu3{<& z-4m>^))TaH&ukuy@pQ>_0fMV0riqq`mWJ3UV)CP=kj>b!O1rHsWl0{3XA-aio4xc{ zh9w`e75kj~1CH9HNqX@hoVc$R?rfdu;k3;lTpd{m}~g%68WjJNS2OF^Cx4K7&-{c{UF& znf+m4x$4bfm+~>B!5Pgrc{-=)tT;eDxibEAsZ}Gl@5d7rMRG6tdCF^h30S4~_7W8%XHZyJp@WL@i=y zkeaJI2AcIeQsc3ovh{o@{ml$m)wNWz7eMJg+EK8&f59)o$Q zDJN6}O@Lg~pgJk>++p`+XM)uj>(7at?1S-R;Nz37;~B@Afz8AR75K)3zLmg30VG71 zg>_(`dbj(rQ~VqEBxZYY*3`bEngV=c0^|eN8vOWdU4EM@yc~9R6J-Ce9u$H|Ac!Yj zVnjW9%(i#kUZgP;sem7Gfm&DeUI1#Rz^j%RKN{Iu*yCu(()w-bHN~t(@+nNG{X^Pg zLTrFCpe^oeBvulEF?M(S)Sfmh@o&f{P3ho3QHI0yEVhv>r5A}aHWBN z-Al@_vO@Ky2mqyBS{^NKq7Va4KL)FAW@kCirmH!>e4ozoJ#F+FP+@9+_#Ch%W#_DZy| z177Ml5WEet^n7X_emx0rZ=<(NR;TSppr3>?eWy358@LT%Ms(zsd15OL8HPxZV@8YN zI*uWm+kEO^x>e69gjO&~w)!z$$_=Z@E44g+98Gp{SLo?qJLSX;66ER1Oc{HL(*wle z7=Cz4mMgo~Jcb%sLt@QVqBD!3D&~QdCu}lnCRwaNN@@Y406HbjS$SMLf{^))AVU^e z^k_eg<7-zMk<$cDl|FXId~^$&PA!`Tk?|%t#daz7pXXpmD=GWnGqOy0*Yg(jKFyz5IkndcN@ zEcs?XCMJzfB&4Kj3eXyNMxH5zqb}kKty<0X9{%Lxk{kYQJjM4$1BMT}9eb*C4&-Om zLiyPWJ$Tat`)REcm~n;c7B;&n?E6cn8~6GC+Fw=U#!sMDjzG~h>eGb8t%adO<6()v 
zVh3~+*k4!$KF9|4sp$3Yb99qt>hp404-1{}VsQka=W+J@E9P+G)@EdXApwrce! zOB;d|2cw4H?uQsn# zg?p(Ev%KfF*HF{Pvv@g6Z{Sb!0oV-iWHaL3(zF;j^nDU>9O||96wWyeY39ttn`JTt zNwFMYXp)j%=~J}`hhIFh3>;&z&Lhh(iM#P$-c}IZ70XIUt30O{fG?fnC@rBi#oO@J zIGNVU2Si~$=v04U;L$jiTeqW4KU$u+9WM>Jx0O3M16hF2Z2{*Jtn1C|Csq$u4o_KD z-D|PP|Hb2goQ!`hj5q?pRe7~p049>e%KtNBecs+1iqJwd@I$TK%wp~yk@o8eyXUc2loFm`+^FfF%Rn#!PXl#^}_M63alTWTz;4mm$r9_pA9e&=;nzvslw2 zVwYo94d5LhI`w@TIiF_#)rp6nO^?IH-G zaw6g-MJH7%Zj?(Jh7VR@5FSfvsSO20C&dMRowErpKNesEsRqznJPG)VM{m+}$GEhnf5 z5{_vKQy#Jm(jI)WrKesmD}45J@adQ^Ow@i*ARMG_guY2lv^|-nX^XPOFALWO!~8g_ z-s{5@9aQa zFGi7kq92%HvS1)@D+q^OF0{Umj>{~Wue=FHu+ zBiDU1hl1YX`l>5l53RJXsNC@3%7ui_lT!EWo~`^b)V)@T2Z6_2TPcE4oeUrMOz$tQ*Ih2s*aoLs`or<=;T#cxexu6EN@7`m%9uLq?} zW#yrJm!4Kf8xQQhifr!8WRjIhh`5_n&#FIEfUEDvlpwlF zsYL;+1&4U6v1BEnf#}44`5CBlORB9K-u*_Cc8h9`TcGsZ5qoSRPco2D;~*88uaM4+ zPKS;2;5sF5@pN~4dL&G!B=)LR=k+8je2E`HH<;@3MUPe*Es52K!=7-{bTkz$UP}vF z_Fr5E)^}&wIfTqsW!$y6cZ612OAw->q7UBbs8FZ9sHSd};&BZXiv@vl=~ZVoVFLi2 zO)^tD*geQpd@hWoi?~B_=$OUTm_w-1TY1AY>~)75Iz+>~x!S`&g(j!J$AX2eG1#%E zXfB!}f73OsJ#j5hQCV+tc)^|u3Ib7Uc&3y0Z`_3SWBy$DP>I(r? 
z*7sNfW7`C1Aq6#LiwM1M)3IqEk2-#YBBPR|)@a8CtKz^Hx7%4#;LCuU{>G2qP+7=3 z;JjIW-woQpqTxiPU>C!t5YmE@sNmf*>X|=!E!KGGL6CpwXw`PUG;7rdudbyT2$yWm z!;zNGHERz7Aww)9xz57TYSplEb5IC@YuN`23@D8tahE;%2I;#+;ye1oD_R>zpT1c9 zh{6Tz)Tp_8&w1B;zw_3=dJ+0OkBqscM&0R{si2sBmUix{>r&i~laLTm`G^aiu|`MY zB%D-BqqGd2Y9x-#?R}Wyab>L1&ZXSaAV-+B3!ZB45Zg2NO{IaZd8e4Bt!0*UfD|VY z5(_Rs@+~3Kq+afa{T<^9*u0mg{mJJw-4@2*KmBU9yHYr(&&PfbD$noc3-v0Ou1E3Q z?Y{ZhLJ%Y_c!IzCma>p@w0Dhnk?L)E6p|w^8enX+qa7>iO~=l4A6?zXl;Wa6sgg$A z`;x}hXs^w*TqQ_81LaYjtX|n2NZw0dm1~mqqePs`!~aSm3@8>OlKh?MEnP~*@q#QQ zGz3pS1>=QzBlQjUQXac604i;dI@XXgMkvlJIHF1((3x0SLpVKBmNvEJSE4M@BuT3w z*+oy6aI)!ZXiqrEk1GUnFUUs%|B4m~I0BqEM36Ky62VOQc+ys5JWb(qRI&;H6`tJa z;W??time|n#BVOksmXZEXIL=aBc(77_vUhGXwJV2x@St`QH{}A?Cw}R==<#sz~7RX z{2g1Mf-7Yv*AeQfOg)oS=U{^qWJjj3e2X+%5Pn~^l@cvLA=z?I8qcc-`2df(=p=5cPp zsPRST?-TQC+^m=^`{NBBVq~P{>=DcgH*eY2zO_9!jrz~579fnLr1KMYr`F;J=hf92 z-18Je0Y}Fj9E8#mNkhQecyt7YxNNwfq#023p7TQ4<)Xl=6ex7J(1SRMQKiD%FDM8} zSQfSvZJRCFp6~u_UAWp&WJ<2wH=Zep4k;$B0ZXR0E}vU+{JwHu3C5~F;)_0(rn`%~ zJ|Yy1D+gZyQu0YJK&XLr(=9`Hxy!A8k>QuxcEJfKBrQp64zIiOtlFQOJJ;|uOZht2 z22U?0-;VrlCg1W1(d#xddHZhlQ!1023OFSNuu&zS&TFBnko@|2G`R>8lM+RAtl+)0 zKz%DTCllIgL#!O_mT;ZvRLs-&Z+EG%*+~;Bzch->S2*RLkGUP)9sehc;vLfo8$gd7 z{Kz{9A<_wy-gB7zX#J06<&=5=w$dxiWj5t5?|Sc)aSKuvNeTmW4Zzm z881(ax>}w*N;c~|VBSRTQkRmd9@a(7j6R+RC|URz2C|_SPLof$@whsX^!^oB%&)0) z@QlNB!Ef;5Q$J`;pQnag_LKO zL}Z?2x~^-Ml%!lrnTmwWN>Y(bMM5OX7-jCIGE?M{-@c^NJbqGHzeYeUKZrEqttTKCk0EkPGPy60mcOpjqd8gBdU&(A=_oOcTXurMJl)m zS5A6prnIEQ@ANF#9!~SA&G>KiTW<}&Uuso9k$WeDF!8Tg?dW0K zZB_GIh7O;@bE?qw*G1vV9=42*$38kn%eUy8?bkB<)~9#QU%pj3Os0KT7rkGYa&ZlR zy5GaTx=@kL;o-d>-Ll2*B=9@aQ@Xf>>YHrLoEp{M!l9+yDQqfuL%ri^#KGz}bCC_6 zXCu;%XT*2yno`Vpw%=>SBx0vhos}qhkKHUU9rdZ6hW*h_9r5Mwtj;GZaTY`z!Q#1N zPvo{>vRM?WJ0|HLSw&^W{Po7+4o6Uss_q-geR9<_9mUSdN9pKA-=CY7e=9{Do;Q12 z=?t|GUsm$mz{1EGKJnvpX)W7E2HPG9aNmrvI4>k_=6AzopF6^N(%+1VH4EW?Vs7cBb?OAe;!AGGyGROA zp-_1YDAXRZYWH<^xAXAzl33k}lCX8Ni!;*k=#gcv_-f2daf06|-wi9x`;6W9&EYbW zjL#MwE^+ZrN%z%C`N#O9V)`oo+9h)AL64Q!rI-GH1q98g=rTSo5;aNYXPMW?$n)bf 
zc|MaZv?!kU$r0_`8{Jps+IjPS*wJsVu@CHhbnc##d93Z_tRWyRng906#Lea<(^!{X zjMlZIoECiGTA9s}x{nO@YYA#)zVzVv+jPoQT2PNJljl^JPStF~FeN>%|B&Oqf`?LPq^@KM$C*X6gY1YVuKl73)U@W;4e_`^<>$eSxW9&zz|EJ7aKo z{)5b^^f`ac=ioN%lDMgjw*p=j=PuB8T^zHR+F3JpI4AI8PiX%UJM{-fxk`erS<1#C zUuH#{yjpJM`)LY_QHQ3Q*BJ5fJ@n=t4SqM1K&kzBOM3SL?M0tAkMqXf-*`tZ?@=Sd{@I=`;T(hKjaC$@X0jsW^EOZ zp_A;42#VX0J1&4e#d+Ohs6#}f)&d(|)n}_}IUG2B#n>}=gesY*duZ@rQ8G`t)J*8X z;l+@;8$71HGoeEp$1m4Dy=1%ZR&P*9v(JHQY;I7Piuk8vodsj&%xsFqHf?g1hpPF< zyxCY!O4~MOVf%eHU3=^ic(g3@il+H<{p!4kmJ&*l?Qy9lYH?Y1b}emq+0u<|d-<(6 zyGG{wX{637%E?tIM>N-FLfWZsy=@S(tNAJ{~>Nc5Cy++uVtqIj&TJ0rnwNhX7px5x! zr=}0Hpn~|%g^!!^Q(a6ZMCW!jDOb9F3%&jUE3(t<`jCl)o~CG`a-|7_p4iN@-n$$l zY9rrL=N4I-na}}oa~t5*#DYJzSkr*_-Y9T7lK+>(0r5FloFyT=jo@<(!QLzy_#82s zD&@Bs7TpzWjFpdC#?uUPhoP)gAZIm92Il3y|09CWb5~+ zwyWA*=A6&HDWvZ{*Bwo#v;kY>8-Bi*)>K{m#@o7Xs(}Z^o6d!4%JWziX$Iuh-4|l_ zymjzZvBgF=rr2cqW83Q<`(8903}ZEUq1rdD9B=rwHv0}+qf*S&=k|qwC3A{FMJGyz zNG>x~XTFoP{g&A^mD9RWo~PqN+cD1_{V}B*Y_%GqPF;-K8K8z+U~LY%LqDy5pa&%% zo|uO!wi(`h0;lHacH-VZvjuygQ=9x~``OD1ZsiiWouf8}yc=iO(Zgr2D|^%NH#R6K z# zw`<*W8FIMOBxq`Le>C@G?LTq%cK?t@-TF`;=ebXRx~Y^RptZbd*>q7{UEHYS)f1fC*$20SpKmJj?68~P4_KN*#^g9=(+8Tje zSEtMj6x#&407cfH|N6xbG*U(V^`y{DgEi%f=E>Hrfy~umn+qcC zyA7&7DwUc)?hY<07|-s%)-!TBl{@AVs?=IlE!5P>6=!+D%sq~Qa`yC|5z2&D=Onvx zYWU2Y>EEP&h?1t__NVkbcg>wGx)d9sPa30etl2J z9Xau1Qa?M3OUXF!+@jV%ZLU9M>w$7dt34{XyX|LF!@Q@BWtr5cRHDNzrI^YKuj}e8 zFz~mtePYPRG?HX@M5EJ|CFA?OyAH>+9e0^<`U`k|n|gPurT|@K2Jw#sc_HcL!0lCVgr&8OyMwKz zt%n=N+0`DqQp*ueEcDA-&;v9D&@IB=4@3+GVy-7*FqY7jSa8^G+a%}X;0j-9pdiAG zg<$le$S`k=7ou3Pr}e34!Mt|`q7jmCAkvNfNF*=!z2y6e=`U&I0e$2GnkNRnRzLuP zN(%PmfvU*|632i&EP}BkTvrT9C;_I&I>p(7yh?D?i`dzT#~Pv(Ky4iB)Mf;1YdvZu z+5nLh9!u%Y4m5ugyn+ceJt3j4nIMv|tJf*R2eQQb$tP82aC!|OzzqK240t1QeC)Ag z?84_lPf{zum4m22P*va$oXrI!91gK_y1kW*F9lJkdKGx*ToMo{cJ?G}tBX%_EC)=Y z9JwnB`OWg!NdtmdUz^;Ja{!qKZS7e~S9nO_}XZ>_gzy1&~i(3 zv-AQhY&*aMbLkIWga#7N!x7QawqHB)b^-V<6hILYQ6aIUbrCRb_Bd;>Mo0G4XR6dD zvVild2S!HBI&~C@nX$BfWhUr|?Skz%Km6;1hHSBs{J2(-;r%eM9!;n^`t-GZ|Eo 
zQUfay5u$ORxr_wFg8tfgJ2-fe$3CMF2#yCx)K<8={St?O^+a|@4MoRGMuFnYKtU|x znO>ZRKmoCEWTw=Qi=!+8EGU!}FcIQW=(~dqTM4W~8U=CXU;%Dm|JuOa62F@cK1D=% zdOKt7Eq%P)5$8gT{Eat+owAEyniBh@Y`DB)sFyp={M;AX|HemyBe1*90M=o1!3wq*U9i313q)%Ab9O42$aM{ z8no6wI1OI;38IN{ucNIME%q#7vaT3AX*tY z8Y$u61wbI25kG0+*2;mvg@YH}fM{p|GPGYMIN&sRwF!t8CP;$zyY>W}1+R$!vBZSQ zvPdZoUfTcyJ|YGDRp|gO8a%%qA_a(&5RE`0B^W$Y9>Oh?!u^pq4;K!evkuXoi;)oS zR|Y$r1kWOeNP&CEkVthCo&pZxn8ZooekFv%1%qdHL!=OLBvOLGbFv{En*?dWpbTxe zc<@APh!;(Y2TQw#6X7|i5V3AA2?CU4!@R92j{`F86e&s1#&#% z+y*!m9^wvB-IT~uR|4MQGmhfY2i07|Ij`!XXB*Kdgn5;pZ?A+4}hZLnb %% @copyright 2008 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc String Formatting for Erlang, inspired by Python 2.6 %% (PEP 3101). 
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_records.erl b/rabbitmq-server/deps/mochiweb/src/mochifmt_records.erl similarity index 50% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_records.erl rename to rabbitmq-server/deps/mochiweb/src/mochifmt_records.erl index 7d166ff..3dccaa4 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_records.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochifmt_records.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2008 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Formatter that understands records. 
%% diff --git a/rabbitmq-server/deps/mochiweb/src/mochifmt_std.erl b/rabbitmq-server/deps/mochiweb/src/mochifmt_std.erl new file mode 100644 index 0000000..6067451 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochifmt_std.erl @@ -0,0 +1,51 @@ +%% @author Bob Ippolito +%% @copyright 2008 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. + +%% @doc Template module for a mochifmt formatter. + +-module(mochifmt_std). +-author('bob@mochimedia.com'). +-export([new/0, format/3, get_value/3, format_field/3, get_field/3, convert_field/3]). + +new() -> + {?MODULE}. + +format(Format, Args, {?MODULE}=THIS) -> + mochifmt:format(Format, Args, THIS). + +get_field(Key, Args, {?MODULE}=THIS) -> + mochifmt:get_field(Key, Args, THIS). + +convert_field(Key, Args, {?MODULE}) -> + mochifmt:convert_field(Key, Args). + +get_value(Key, Args, {?MODULE}) -> + mochifmt:get_value(Key, Args). 
+ +format_field(Arg, Format, {?MODULE}=THIS) -> + mochifmt:format_field(Arg, Format, THIS). + +%% +%% Tests +%% +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). +-endif. diff --git a/rabbitmq-server/deps/mochiweb/src/mochiglobal.erl b/rabbitmq-server/deps/mochiweb/src/mochiglobal.erl new file mode 100644 index 0000000..8df007f --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochiglobal.erl @@ -0,0 +1,127 @@ +%% @author Bob Ippolito +%% @copyright 2010 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. + + +%% @doc Abuse module constant pools as a "read-only shared heap" (since erts 5.6) +%% [1]. +-module(mochiglobal). +-author("Bob Ippolito "). +-export([get/1, get/2, put/2, delete/1]). + +-spec get(atom()) -> any() | undefined. +%% @equiv get(K, undefined) +get(K) -> + get(K, undefined). + +-spec get(atom(), T) -> any() | T. +%% @doc Get the term for K or return Default. 
+get(K, Default) -> + get(K, Default, key_to_module(K)). + +get(_K, Default, Mod) -> + try Mod:term() + catch error:undef -> + Default + end. + +-spec put(atom(), any()) -> ok. +%% @doc Store term V at K, replaces an existing term if present. +put(K, V) -> + put(K, V, key_to_module(K)). + +put(_K, V, Mod) -> + Bin = compile(Mod, V), + code:purge(Mod), + {module, Mod} = code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin), + ok. + +-spec delete(atom()) -> boolean(). +%% @doc Delete term stored at K, no-op if non-existent. +delete(K) -> + delete(K, key_to_module(K)). + +delete(_K, Mod) -> + code:purge(Mod), + code:delete(Mod). + +-spec key_to_module(atom()) -> atom(). +key_to_module(K) -> + list_to_atom("mochiglobal:" ++ atom_to_list(K)). + +-spec compile(atom(), any()) -> binary(). +compile(Module, T) -> + {ok, Module, Bin} = compile:forms(forms(Module, T), + [verbose, report_errors]), + Bin. + +-spec forms(atom(), any()) -> [erl_syntax:syntaxTree()]. +forms(Module, T) -> + [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)]. + +-spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()]. +term_to_abstract(Module, Getter, T) -> + [%% -module(Module). + erl_syntax:attribute( + erl_syntax:atom(module), + [erl_syntax:atom(Module)]), + %% -export([Getter/0]). + erl_syntax:attribute( + erl_syntax:atom(export), + [erl_syntax:list( + [erl_syntax:arity_qualifier( + erl_syntax:atom(Getter), + erl_syntax:integer(0))])]), + %% Getter() -> T. + erl_syntax:function( + erl_syntax:atom(Getter), + [erl_syntax:clause([], none, [erl_syntax:abstract(T)])])]. + +%% +%% Tests +%% +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). 
+get_put_delete_test() -> + K = '$$test$$mochiglobal', + delete(K), + ?assertEqual( + bar, + get(K, bar)), + try + ?MODULE:put(K, baz), + ?assertEqual( + baz, + get(K, bar)), + ?MODULE:put(K, wibble), + ?assertEqual( + wibble, + ?MODULE:get(K)) + after + delete(K) + end, + ?assertEqual( + bar, + get(K, bar)), + ?assertEqual( + undefined, + ?MODULE:get(K)), + ok. +-endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochihex.erl b/rabbitmq-server/deps/mochiweb/src/mochihex.erl similarity index 65% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochihex.erl rename to rabbitmq-server/deps/mochiweb/src/mochihex.erl index 796f3ad..91b2789 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochihex.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochihex.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2006 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Utilities for working with hexadecimal strings. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochijson.erl b/rabbitmq-server/deps/mochiweb/src/mochijson.erl similarity index 94% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochijson.erl rename to rabbitmq-server/deps/mochiweb/src/mochijson.erl index d283189..fb9b1dc 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochijson.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochijson.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2006 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. 
%% @doc Yet another JSON (RFC 4627) library for Erlang. -module(mochijson). diff --git a/rabbitmq-server/deps/mochiweb/src/mochijson2.erl b/rabbitmq-server/deps/mochiweb/src/mochijson2.erl new file mode 100644 index 0000000..3d880db --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochijson2.erl @@ -0,0 +1,907 @@ +%% @author Bob Ippolito +%% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. + +%% @doc Yet another JSON (RFC 4627) library for Erlang. mochijson2 works +%% with binaries as strings, arrays as lists (without an {array, _}) +%% wrapper and it only knows how to decode UTF-8 (and ASCII). +%% +%% JSON terms are decoded as follows (javascript -> erlang): +%%
    +%%
  • {"key": "value"} -> +%% {struct, [{<<"key">>, <<"value">>}]}
  • +%%
  • ["array", 123, 12.34, true, false, null] -> +%% [<<"array">>, 123, 12.34, true, false, null] +%%
  • +%%
+%%
    +%%
  • Strings in JSON decode to UTF-8 binaries in Erlang
  • +%%
  • Objects decode to {struct, PropList}
  • +%%
  • Numbers decode to integer or float
  • +%%
  • true, false, null decode to their respective terms.
  • +%%
+%% The encoder will accept the same format that the decoder will produce, +%% but will also allow additional cases for leniency: +%%
    +%%
  • atoms other than true, false, null will be considered UTF-8 +%% strings (even as a proplist key) +%%
  • +%%
  • {json, IoList} will insert IoList directly into the output +%% with no validation +%%
  • +%%
  • {array, Array} will be encoded as Array +%% (legacy mochijson style) +%%
  • +%%
  • A non-empty raw proplist will be encoded as an object as long +%% as the first pair does not have an atom key of json, struct, +%% or array +%%
  • +%%
+ +-module(mochijson2). +-author('bob@mochimedia.com'). +-export([encoder/1, encode/1]). +-export([decoder/1, decode/1, decode/2]). + +%% This is a macro to placate syntax highlighters.. +-define(Q, $\"). +-define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset, + column=N+S#decoder.column}). +-define(INC_COL(S), S#decoder{offset=1+S#decoder.offset, + column=1+S#decoder.column}). +-define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset, + column=1, + line=1+S#decoder.line}). +-define(INC_CHAR(S, C), + case C of + $\n -> + S#decoder{column=1, + line=1+S#decoder.line, + offset=1+S#decoder.offset}; + _ -> + S#decoder{column=1+S#decoder.column, + offset=1+S#decoder.offset} + end). +-define(IS_WHITESPACE(C), + (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)). + +%% @type json_string() = atom | binary() +%% @type json_number() = integer() | float() +%% @type json_array() = [json_term()] +%% @type json_object() = {struct, [{json_string(), json_term()}]} +%% @type json_eep18_object() = {[{json_string(), json_term()}]} +%% @type json_iolist() = {json, iolist()} +%% @type json_term() = json_string() | json_number() | json_array() | +%% json_object() | json_eep18_object() | json_iolist() + +-record(encoder, {handler=null, + utf8=false}). + +-record(decoder, {object_hook=null, + offset=0, + line=1, + column=1, + state=null}). + +%% @spec encoder([encoder_option()]) -> function() +%% @doc Create an encoder/1 with the given options. +%% @type encoder_option() = handler_option() | utf8_option() +%% @type utf8_option() = boolean(). Emit unicode as utf8 (default - false) +encoder(Options) -> + State = parse_encoder_options(Options, #encoder{}), + fun (O) -> json_encode(O, State) end. + +%% @spec encode(json_term()) -> iolist() +%% @doc Encode the given as JSON to an iolist. +encode(Any) -> + json_encode(Any, #encoder{}). + +%% @spec decoder([decoder_option()]) -> function() +%% @doc Create a decoder/1 with the given options. 
+decoder(Options) -> + State = parse_decoder_options(Options, #decoder{}), + fun (O) -> json_decode(O, State) end. + +%% @spec decode(iolist(), [{format, proplist | eep18 | struct}]) -> json_term() +%% @doc Decode the given iolist to Erlang terms using the given object format +%% for decoding, where proplist returns JSON objects as [{binary(), json_term()}] +%% proplists, eep18 returns JSON objects as {[binary(), json_term()]}, and struct +%% returns them as-is. +decode(S, Options) -> + json_decode(S, parse_decoder_options(Options, #decoder{})). + +%% @spec decode(iolist()) -> json_term() +%% @doc Decode the given iolist to Erlang terms. +decode(S) -> + json_decode(S, #decoder{}). + +%% Internal API + +parse_encoder_options([], State) -> + State; +parse_encoder_options([{handler, Handler} | Rest], State) -> + parse_encoder_options(Rest, State#encoder{handler=Handler}); +parse_encoder_options([{utf8, Switch} | Rest], State) -> + parse_encoder_options(Rest, State#encoder{utf8=Switch}). + +parse_decoder_options([], State) -> + State; +parse_decoder_options([{object_hook, Hook} | Rest], State) -> + parse_decoder_options(Rest, State#decoder{object_hook=Hook}); +parse_decoder_options([{format, Format} | Rest], State) + when Format =:= struct orelse Format =:= eep18 orelse Format =:= proplist -> + parse_decoder_options(Rest, State#decoder{object_hook=Format}). 
+ +json_encode(true, _State) -> + <<"true">>; +json_encode(false, _State) -> + <<"false">>; +json_encode(null, _State) -> + <<"null">>; +json_encode(I, _State) when is_integer(I) -> + integer_to_list(I); +json_encode(F, _State) when is_float(F) -> + mochinum:digits(F); +json_encode(S, State) when is_binary(S); is_atom(S) -> + json_encode_string(S, State); +json_encode([{K, _}|_] = Props, State) when (K =/= struct andalso + K =/= array andalso + K =/= json) -> + json_encode_proplist(Props, State); +json_encode({struct, Props}, State) when is_list(Props) -> + json_encode_proplist(Props, State); +json_encode({Props}, State) when is_list(Props) -> + json_encode_proplist(Props, State); +json_encode({}, State) -> + json_encode_proplist([], State); +json_encode(Array, State) when is_list(Array) -> + json_encode_array(Array, State); +json_encode({array, Array}, State) when is_list(Array) -> + json_encode_array(Array, State); +json_encode({json, IoList}, _State) -> + IoList; +json_encode(Bad, #encoder{handler=null}) -> + exit({json_encode, {bad_term, Bad}}); +json_encode(Bad, State=#encoder{handler=Handler}) -> + json_encode(Handler(Bad), State). + +json_encode_array([], _State) -> + <<"[]">>; +json_encode_array(L, State) -> + F = fun (O, Acc) -> + [$,, json_encode(O, State) | Acc] + end, + [$, | Acc1] = lists:foldl(F, "[", L), + lists:reverse([$\] | Acc1]). + +json_encode_proplist([], _State) -> + <<"{}">>; +json_encode_proplist(Props, State) -> + F = fun ({K, V}, Acc) -> + KS = json_encode_string(K, State), + VS = json_encode(V, State), + [$,, VS, $:, KS | Acc] + end, + [$, | Acc1] = lists:foldl(F, "{", Props), + lists:reverse([$\} | Acc1]). 
+ +json_encode_string(A, State) when is_atom(A) -> + L = atom_to_list(A), + case json_string_is_safe(L) of + true -> + [?Q, L, ?Q]; + false -> + json_encode_string_unicode(xmerl_ucs:from_utf8(L), State, [?Q]) + end; +json_encode_string(B, State) when is_binary(B) -> + case json_bin_is_safe(B) of + true -> + [?Q, B, ?Q]; + false -> + json_encode_string_unicode(xmerl_ucs:from_utf8(B), State, [?Q]) + end; +json_encode_string(I, _State) when is_integer(I) -> + [?Q, integer_to_list(I), ?Q]; +json_encode_string(L, State) when is_list(L) -> + case json_string_is_safe(L) of + true -> + [?Q, L, ?Q]; + false -> + json_encode_string_unicode(L, State, [?Q]) + end. + +json_string_is_safe([]) -> + true; +json_string_is_safe([C | Rest]) -> + case C of + ?Q -> + false; + $\\ -> + false; + $\b -> + false; + $\f -> + false; + $\n -> + false; + $\r -> + false; + $\t -> + false; + C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF -> + false; + C when C < 16#7f -> + json_string_is_safe(Rest); + _ -> + false + end. + +json_bin_is_safe(<<>>) -> + true; +json_bin_is_safe(<>) -> + case C of + ?Q -> + false; + $\\ -> + false; + $\b -> + false; + $\f -> + false; + $\n -> + false; + $\r -> + false; + $\t -> + false; + C when C >= 0, C < $\s; C >= 16#7f -> + false; + C when C < 16#7f -> + json_bin_is_safe(Rest) + end. + +json_encode_string_unicode([], _State, Acc) -> + lists:reverse([$\" | Acc]); +json_encode_string_unicode([C | Cs], State, Acc) -> + Acc1 = case C of + ?Q -> + [?Q, $\\ | Acc]; + %% Escaping solidus is only useful when trying to protect + %% against "" injection attacks which are only + %% possible when JSON is inserted into a HTML document + %% in-line. mochijson2 does not protect you from this, so + %% if you do insert directly into HTML then you need to + %% uncomment the following case or escape the output of encode. 
+ %% + %% $/ -> + %% [$/, $\\ | Acc]; + %% + $\\ -> + [$\\, $\\ | Acc]; + $\b -> + [$b, $\\ | Acc]; + $\f -> + [$f, $\\ | Acc]; + $\n -> + [$n, $\\ | Acc]; + $\r -> + [$r, $\\ | Acc]; + $\t -> + [$t, $\\ | Acc]; + C when C >= 0, C < $\s -> + [unihex(C) | Acc]; + C when C >= 16#7f, C =< 16#10FFFF, State#encoder.utf8 -> + [xmerl_ucs:to_utf8(C) | Acc]; + C when C >= 16#7f, C =< 16#10FFFF, not State#encoder.utf8 -> + [unihex(C) | Acc]; + C when C < 16#7f -> + [C | Acc]; + _ -> + exit({json_encode, {bad_char, C}}) + end, + json_encode_string_unicode(Cs, State, Acc1). + +hexdigit(C) when C >= 0, C =< 9 -> + C + $0; +hexdigit(C) when C =< 15 -> + C + $a - 10. + +unihex(C) when C < 16#10000 -> + <> = <>, + Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]], + [$\\, $u | Digits]; +unihex(C) when C =< 16#10FFFF -> + N = C - 16#10000, + S1 = 16#d800 bor ((N bsr 10) band 16#3ff), + S2 = 16#dc00 bor (N band 16#3ff), + [unihex(S1), unihex(S2)]. + +json_decode(L, S) when is_list(L) -> + json_decode(iolist_to_binary(L), S); +json_decode(B, S) -> + {Res, S1} = decode1(B, S), + {eof, _} = tokenize(B, S1#decoder{state=trim}), + Res. + +decode1(B, S=#decoder{state=null}) -> + case tokenize(B, S#decoder{state=any}) of + {{const, C}, S1} -> + {C, S1}; + {start_array, S1} -> + decode_array(B, S1); + {start_object, S1} -> + decode_object(B, S1) + end. + +make_object(V, #decoder{object_hook=N}) when N =:= null orelse N =:= struct -> + V; +make_object({struct, P}, #decoder{object_hook=eep18}) -> + {P}; +make_object({struct, P}, #decoder{object_hook=proplist}) -> + P; +make_object(V, #decoder{object_hook=Hook}) -> + Hook(V). + +decode_object(B, S) -> + decode_object(B, S#decoder{state=key}, []). 
+ +decode_object(B, S=#decoder{state=key}, Acc) -> + case tokenize(B, S) of + {end_object, S1} -> + V = make_object({struct, lists:reverse(Acc)}, S1), + {V, S1#decoder{state=null}}; + {{const, K}, S1} -> + {colon, S2} = tokenize(B, S1), + {V, S3} = decode1(B, S2#decoder{state=null}), + decode_object(B, S3#decoder{state=comma}, [{K, V} | Acc]) + end; +decode_object(B, S=#decoder{state=comma}, Acc) -> + case tokenize(B, S) of + {end_object, S1} -> + V = make_object({struct, lists:reverse(Acc)}, S1), + {V, S1#decoder{state=null}}; + {comma, S1} -> + decode_object(B, S1#decoder{state=key}, Acc) + end. + +decode_array(B, S) -> + decode_array(B, S#decoder{state=any}, []). + +decode_array(B, S=#decoder{state=any}, Acc) -> + case tokenize(B, S) of + {end_array, S1} -> + {lists:reverse(Acc), S1#decoder{state=null}}; + {start_array, S1} -> + {Array, S2} = decode_array(B, S1), + decode_array(B, S2#decoder{state=comma}, [Array | Acc]); + {start_object, S1} -> + {Array, S2} = decode_object(B, S1), + decode_array(B, S2#decoder{state=comma}, [Array | Acc]); + {{const, Const}, S1} -> + decode_array(B, S1#decoder{state=comma}, [Const | Acc]) + end; +decode_array(B, S=#decoder{state=comma}, Acc) -> + case tokenize(B, S) of + {end_array, S1} -> + {lists:reverse(Acc), S1#decoder{state=null}}; + {comma, S1} -> + decode_array(B, S1#decoder{state=any}, Acc) + end. + +tokenize_string(B, S=#decoder{offset=O}) -> + case tokenize_string_fast(B, O) of + {escape, O1} -> + Length = O1 - O, + S1 = ?ADV_COL(S, Length), + <<_:O/binary, Head:Length/binary, _/binary>> = B, + tokenize_string(B, S1, lists:reverse(binary_to_list(Head))); + O1 -> + Length = O1 - O, + <<_:O/binary, String:Length/binary, ?Q, _/binary>> = B, + {{const, String}, ?ADV_COL(S, Length + 1)} + end. 
+ +tokenize_string_fast(B, O) -> + case B of + <<_:O/binary, ?Q, _/binary>> -> + O; + <<_:O/binary, $\\, _/binary>> -> + {escape, O}; + <<_:O/binary, C1, _/binary>> when C1 < 128 -> + tokenize_string_fast(B, 1 + O); + <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223, + C2 >= 128, C2 =< 191 -> + tokenize_string_fast(B, 2 + O); + <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239, + C2 >= 128, C2 =< 191, + C3 >= 128, C3 =< 191 -> + tokenize_string_fast(B, 3 + O); + <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244, + C2 >= 128, C2 =< 191, + C3 >= 128, C3 =< 191, + C4 >= 128, C4 =< 191 -> + tokenize_string_fast(B, 4 + O); + _ -> + throw(invalid_utf8) + end. + +tokenize_string(B, S=#decoder{offset=O}, Acc) -> + case B of + <<_:O/binary, ?Q, _/binary>> -> + {{const, iolist_to_binary(lists:reverse(Acc))}, ?INC_COL(S)}; + <<_:O/binary, "\\\"", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\" | Acc]); + <<_:O/binary, "\\\\", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]); + <<_:O/binary, "\\/", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]); + <<_:O/binary, "\\b", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]); + <<_:O/binary, "\\f", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]); + <<_:O/binary, "\\n", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]); + <<_:O/binary, "\\r", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]); + <<_:O/binary, "\\t", _/binary>> -> + tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]); + <<_:O/binary, "\\u", C3, C2, C1, C0, Rest/binary>> -> + C = erlang:list_to_integer([C3, C2, C1, C0], 16), + if C > 16#D7FF, C < 16#DC00 -> + %% coalesce UTF-16 surrogate pair + <<"\\u", D3, D2, D1, D0, _/binary>> = Rest, + D = erlang:list_to_integer([D3,D2,D1,D0], 16), + [CodePoint] = xmerl_ucs:from_utf16be(<>), + Acc1 = lists:reverse(xmerl_ucs:to_utf8(CodePoint), Acc), + tokenize_string(B, ?ADV_COL(S, 12), Acc1); + true -> 
+ Acc1 = lists:reverse(xmerl_ucs:to_utf8(C), Acc), + tokenize_string(B, ?ADV_COL(S, 6), Acc1) + end; + <<_:O/binary, C1, _/binary>> when C1 < 128 -> + tokenize_string(B, ?INC_CHAR(S, C1), [C1 | Acc]); + <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223, + C2 >= 128, C2 =< 191 -> + tokenize_string(B, ?ADV_COL(S, 2), [C2, C1 | Acc]); + <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239, + C2 >= 128, C2 =< 191, + C3 >= 128, C3 =< 191 -> + tokenize_string(B, ?ADV_COL(S, 3), [C3, C2, C1 | Acc]); + <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244, + C2 >= 128, C2 =< 191, + C3 >= 128, C3 =< 191, + C4 >= 128, C4 =< 191 -> + tokenize_string(B, ?ADV_COL(S, 4), [C4, C3, C2, C1 | Acc]); + _ -> + throw(invalid_utf8) + end. + +tokenize_number(B, S) -> + case tokenize_number(B, sign, S, []) of + {{int, Int}, S1} -> + {{const, list_to_integer(Int)}, S1}; + {{float, Float}, S1} -> + {{const, list_to_float(Float)}, S1} + end. + +tokenize_number(B, sign, S=#decoder{offset=O}, []) -> + case B of + <<_:O/binary, $-, _/binary>> -> + tokenize_number(B, int, ?INC_COL(S), [$-]); + _ -> + tokenize_number(B, int, S, []) + end; +tokenize_number(B, int, S=#decoder{offset=O}, Acc) -> + case B of + <<_:O/binary, $0, _/binary>> -> + tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]); + <<_:O/binary, C, _/binary>> when C >= $1 andalso C =< $9 -> + tokenize_number(B, int1, ?INC_COL(S), [C | Acc]) + end; +tokenize_number(B, int1, S=#decoder{offset=O}, Acc) -> + case B of + <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> + tokenize_number(B, int1, ?INC_COL(S), [C | Acc]); + _ -> + tokenize_number(B, frac, S, Acc) + end; +tokenize_number(B, frac, S=#decoder{offset=O}, Acc) -> + case B of + <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 -> + tokenize_number(B, frac1, ?ADV_COL(S, 2), [C, $. | Acc]); + <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E -> + tokenize_number(B, esign, ?INC_COL(S), [$e, $0, $. 
| Acc]); + _ -> + {{int, lists:reverse(Acc)}, S} + end; +tokenize_number(B, frac1, S=#decoder{offset=O}, Acc) -> + case B of + <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> + tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]); + <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E -> + tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]); + _ -> + {{float, lists:reverse(Acc)}, S} + end; +tokenize_number(B, esign, S=#decoder{offset=O}, Acc) -> + case B of + <<_:O/binary, C, _/binary>> when C =:= $- orelse C=:= $+ -> + tokenize_number(B, eint, ?INC_COL(S), [C | Acc]); + _ -> + tokenize_number(B, eint, S, Acc) + end; +tokenize_number(B, eint, S=#decoder{offset=O}, Acc) -> + case B of + <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> + tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]) + end; +tokenize_number(B, eint1, S=#decoder{offset=O}, Acc) -> + case B of + <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> + tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]); + _ -> + {{float, lists:reverse(Acc)}, S} + end. 
+ +tokenize(B, S=#decoder{offset=O}) -> + case B of + <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) -> + tokenize(B, ?INC_CHAR(S, C)); + <<_:O/binary, "{", _/binary>> -> + {start_object, ?INC_COL(S)}; + <<_:O/binary, "}", _/binary>> -> + {end_object, ?INC_COL(S)}; + <<_:O/binary, "[", _/binary>> -> + {start_array, ?INC_COL(S)}; + <<_:O/binary, "]", _/binary>> -> + {end_array, ?INC_COL(S)}; + <<_:O/binary, ",", _/binary>> -> + {comma, ?INC_COL(S)}; + <<_:O/binary, ":", _/binary>> -> + {colon, ?INC_COL(S)}; + <<_:O/binary, "null", _/binary>> -> + {{const, null}, ?ADV_COL(S, 4)}; + <<_:O/binary, "true", _/binary>> -> + {{const, true}, ?ADV_COL(S, 4)}; + <<_:O/binary, "false", _/binary>> -> + {{const, false}, ?ADV_COL(S, 5)}; + <<_:O/binary, "\"", _/binary>> -> + tokenize_string(B, ?INC_COL(S)); + <<_:O/binary, C, _/binary>> when (C >= $0 andalso C =< $9) + orelse C =:= $- -> + tokenize_number(B, S); + <<_:O/binary>> -> + trim = S#decoder.state, + {eof, S} + end. +%% +%% Tests +%% +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + + +%% testing constructs borrowed from the Yaws JSON implementation. + +%% Create an object from a list of Key/Value pairs. + +obj_new() -> + {struct, []}. + +is_obj({struct, Props}) -> + F = fun ({K, _}) when is_binary(K) -> true end, + lists:all(F, Props). + +obj_from_list(Props) -> + Obj = {struct, Props}, + ?assert(is_obj(Obj)), + Obj. + +%% Test for equivalence of Erlang terms. +%% Due to arbitrary order of construction, equivalent objects might +%% compare unequal as erlang terms, so we need to carefully recurse +%% through aggregates (tuples and objects). + +equiv({struct, Props1}, {struct, Props2}) -> + equiv_object(Props1, Props2); +equiv(L1, L2) when is_list(L1), is_list(L2) -> + equiv_list(L1, L2); +equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2; +equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2; +equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true. 
+ +%% Object representation and traversal order is unknown. +%% Use the sledgehammer and sort property lists. + +equiv_object(Props1, Props2) -> + L1 = lists:keysort(1, Props1), + L2 = lists:keysort(1, Props2), + Pairs = lists:zip(L1, L2), + true = lists:all(fun({{K1, V1}, {K2, V2}}) -> + equiv(K1, K2) and equiv(V1, V2) + end, Pairs). + +%% Recursively compare tuple elements for equivalence. + +equiv_list([], []) -> + true; +equiv_list([V1 | L1], [V2 | L2]) -> + equiv(V1, V2) andalso equiv_list(L1, L2). + +decode_test() -> + [1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>), + <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]). + +e2j_vec_test() -> + test_one(e2j_test_vec(utf8), 1). + +test_one([], _N) -> + %% io:format("~p tests passed~n", [N-1]), + ok; +test_one([{E, J} | Rest], N) -> + %% io:format("[~p] ~p ~p~n", [N, E, J]), + true = equiv(E, decode(J)), + true = equiv(E, decode(encode(E))), + test_one(Rest, 1+N). + +e2j_test_vec(utf8) -> + [ + {1, "1"}, + {3.1416, "3.14160"}, %% text representation may truncate, trail zeroes + {-1, "-1"}, + {-3.1416, "-3.14160"}, + {12.0e10, "1.20000e+11"}, + {1.234E+10, "1.23400e+10"}, + {-1.234E-10, "-1.23400e-10"}, + {10.0, "1.0e+01"}, + {123.456, "1.23456E+2"}, + {10.0, "1e1"}, + {<<"foo">>, "\"foo\""}, + {<<"foo", 5, "bar">>, "\"foo\\u0005bar\""}, + {<<"">>, "\"\""}, + {<<"\n\n\n">>, "\"\\n\\n\\n\""}, + {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\""}, + {obj_new(), "{}"}, + {obj_from_list([{<<"foo">>, <<"bar">>}]), "{\"foo\":\"bar\"}"}, + {obj_from_list([{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]), + "{\"foo\":\"bar\",\"baz\":123}"}, + {[], "[]"}, + {[[]], "[[]]"}, + {[1, <<"foo">>], "[1,\"foo\"]"}, + + %% json array in a json object + {obj_from_list([{<<"foo">>, [123]}]), + "{\"foo\":[123]}"}, + + %% json object in a json object + {obj_from_list([{<<"foo">>, obj_from_list([{<<"bar">>, true}])}]), + "{\"foo\":{\"bar\":true}}"}, + + %% fold evaluation order + {obj_from_list([{<<"foo">>, []}, + 
{<<"bar">>, obj_from_list([{<<"baz">>, true}])}, + {<<"alice">>, <<"bob">>}]), + "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"}, + + %% json object in a json array + {[-123, <<"foo">>, obj_from_list([{<<"bar">>, []}]), null], + "[-123,\"foo\",{\"bar\":[]},null]"} + ]. + +%% test utf8 encoding +encoder_utf8_test() -> + %% safe conversion case (default) + [34,"\\u0001","\\u0442","\\u0435","\\u0441","\\u0442",34] = + encode(<<1,"\321\202\320\265\321\201\321\202">>), + + %% raw utf8 output (optional) + Enc = mochijson2:encoder([{utf8, true}]), + [34,"\\u0001",[209,130],[208,181],[209,129],[209,130],34] = + Enc(<<1,"\321\202\320\265\321\201\321\202">>). + +input_validation_test() -> + Good = [ + {16#00A3, <>}, %% pound + {16#20AC, <>}, %% euro + {16#10196, <>} %% denarius + ], + lists:foreach(fun({CodePoint, UTF8}) -> + Expect = list_to_binary(xmerl_ucs:to_utf8(CodePoint)), + Expect = decode(UTF8) + end, Good), + + Bad = [ + %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte + <>, + %% missing continuations, last byte in each should be 80-BF + <>, + <>, + <>, + %% we don't support code points > 10FFFF per RFC 3629 + <>, + %% escape characters trigger a different code path + <> + ], + lists:foreach( + fun(X) -> + ok = try decode(X) catch invalid_utf8 -> ok end, + %% could be {ucs,{bad_utf8_character_code}} or + %% {json_encode,{bad_char,_}} + {'EXIT', _} = (catch encode(X)) + end, Bad). + +inline_json_test() -> + ?assertEqual(<<"\"iodata iodata\"">>, + iolist_to_binary( + encode({json, [<<"\"iodata">>, " iodata\""]}))), + ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]}, + decode( + encode({struct, + [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))), + ok. + +big_unicode_test() -> + UTF8Seq = list_to_binary(xmerl_ucs:to_utf8(16#0001d120)), + ?assertEqual( + <<"\"\\ud834\\udd20\"">>, + iolist_to_binary(encode(UTF8Seq))), + ?assertEqual( + UTF8Seq, + decode(iolist_to_binary(encode(UTF8Seq)))), + ok. 
+ +custom_decoder_test() -> + ?assertEqual( + {struct, [{<<"key">>, <<"value">>}]}, + (decoder([]))("{\"key\": \"value\"}")), + F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end, + ?assertEqual( + win, + (decoder([{object_hook, F}]))("{\"key\": \"value\"}")), + ok. + +atom_test() -> + %% JSON native atoms + [begin + ?assertEqual(A, decode(atom_to_list(A))), + ?assertEqual(iolist_to_binary(atom_to_list(A)), + iolist_to_binary(encode(A))) + end || A <- [true, false, null]], + %% Atom to string + ?assertEqual( + <<"\"foo\"">>, + iolist_to_binary(encode(foo))), + ?assertEqual( + <<"\"\\ud834\\udd20\"">>, + iolist_to_binary(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))), + ok. + +key_encode_test() -> + %% Some forms are accepted as keys that would not be strings in other + %% cases + ?assertEqual( + <<"{\"foo\":1}">>, + iolist_to_binary(encode({struct, [{foo, 1}]}))), + ?assertEqual( + <<"{\"foo\":1}">>, + iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))), + ?assertEqual( + <<"{\"foo\":1}">>, + iolist_to_binary(encode({struct, [{"foo", 1}]}))), + ?assertEqual( + <<"{\"foo\":1}">>, + iolist_to_binary(encode([{foo, 1}]))), + ?assertEqual( + <<"{\"foo\":1}">>, + iolist_to_binary(encode([{<<"foo">>, 1}]))), + ?assertEqual( + <<"{\"foo\":1}">>, + iolist_to_binary(encode([{"foo", 1}]))), + ?assertEqual( + <<"{\"\\ud834\\udd20\":1}">>, + iolist_to_binary( + encode({struct, [{[16#0001d120], 1}]}))), + ?assertEqual( + <<"{\"1\":1}">>, + iolist_to_binary(encode({struct, [{1, 1}]}))), + ok. 
+ +unsafe_chars_test() -> + Chars = "\"\\\b\f\n\r\t", + [begin + ?assertEqual(false, json_string_is_safe([C])), + ?assertEqual(false, json_bin_is_safe(<>)), + ?assertEqual(<>, decode(encode(<>))) + end || C <- Chars], + ?assertEqual( + false, + json_string_is_safe([16#0001d120])), + ?assertEqual( + false, + json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8(16#0001d120)))), + ?assertEqual( + [16#0001d120], + xmerl_ucs:from_utf8( + binary_to_list( + decode(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))))), + ?assertEqual( + false, + json_string_is_safe([16#110000])), + ?assertEqual( + false, + json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8([16#110000])))), + %% solidus can be escaped but isn't unsafe by default + ?assertEqual( + <<"/">>, + decode(<<"\"\\/\"">>)), + ok. + +int_test() -> + ?assertEqual(0, decode("0")), + ?assertEqual(1, decode("1")), + ?assertEqual(11, decode("11")), + ok. + +large_int_test() -> + ?assertEqual(<<"-2147483649214748364921474836492147483649">>, + iolist_to_binary(encode(-2147483649214748364921474836492147483649))), + ?assertEqual(<<"2147483649214748364921474836492147483649">>, + iolist_to_binary(encode(2147483649214748364921474836492147483649))), + ok. + +float_test() -> + ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649.0))), + ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648.0))), + ok. + +handler_test() -> + ?assertEqual( + {'EXIT',{json_encode,{bad_term,{x,y}}}}, + catch encode({x,y})), + F = fun ({x,y}) -> [] end, + ?assertEqual( + <<"[]">>, + iolist_to_binary((encoder([{handler, F}]))({x, y}))), + ok. + +encode_empty_test_() -> + [{A, ?_assertEqual(<<"{}">>, iolist_to_binary(encode(B)))} + || {A, B} <- [{"eep18 {}", {}}, + {"eep18 {[]}", {[]}}, + {"{struct, []}", {struct, []}}]]. 
+ +encode_test_() -> + P = [{<<"k">>, <<"v">>}], + JSON = iolist_to_binary(encode({struct, P})), + [{atom_to_list(F), + ?_assertEqual(JSON, iolist_to_binary(encode(decode(JSON, [{format, F}]))))} + || F <- [struct, eep18, proplist]]. + +format_test_() -> + P = [{<<"k">>, <<"v">>}], + JSON = iolist_to_binary(encode({struct, P})), + [{atom_to_list(F), + ?_assertEqual(A, decode(JSON, [{format, F}]))} + || {F, A} <- [{struct, {struct, P}}, + {eep18, {P}}, + {proplist, P}]]. + +-endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilists.erl b/rabbitmq-server/deps/mochiweb/src/mochilists.erl similarity index 72% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilists.erl rename to rabbitmq-server/deps/mochiweb/src/mochilists.erl index d93b241..24fa2f3 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilists.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochilists.erl @@ -1,5 +1,23 @@ %% @copyright Copyright (c) 2010 Mochi Media, Inc. %% @author David Reid +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Utility functions for dealing with proplists. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilogfile2.erl b/rabbitmq-server/deps/mochiweb/src/mochilogfile2.erl similarity index 77% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilogfile2.erl rename to rabbitmq-server/deps/mochiweb/src/mochilogfile2.erl index b4a7e3c..6ff8fec 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochilogfile2.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochilogfile2.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2010 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. 
%% @doc Write newline delimited log files, ensuring that if a truncated %% entry is found on log open then it is fixed before writing. Uses diff --git a/rabbitmq-server/deps/mochiweb/src/mochinum.erl b/rabbitmq-server/deps/mochiweb/src/mochinum.erl new file mode 100644 index 0000000..d687370 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochinum.erl @@ -0,0 +1,372 @@ +%% @copyright 2007 Mochi Media, Inc. +%% @author Bob Ippolito +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. + +%% @doc Useful numeric algorithms for floats that cover some deficiencies +%% in the math module. More interesting is digits/1, which implements +%% the algorithm from: +%% http://www.cs.indiana.edu/~burger/fp/index.html +%% See also "Printing Floating-Point Numbers Quickly and Accurately" +%% in Proceedings of the SIGPLAN '96 Conference on Programming Language +%% Design and Implementation. + +-module(mochinum). +-author("Bob Ippolito "). 
+-export([digits/1, frexp/1, int_pow/2, int_ceil/1]). + +%% IEEE 754 Float exponent bias +-define(FLOAT_BIAS, 1022). +-define(MIN_EXP, -1074). +-define(BIG_POW, 4503599627370496). + +%% External API + +%% @spec digits(number()) -> string() +%% @doc Returns a string that accurately represents the given integer or float +%% using a conservative amount of digits. Great for generating +%% human-readable output, or compact ASCII serializations for floats. +digits(N) when is_integer(N) -> + integer_to_list(N); +digits(0.0) -> + "0.0"; +digits(Float) -> + {Frac1, Exp1} = frexp_int(Float), + [Place0 | Digits0] = digits1(Float, Exp1, Frac1), + {Place, Digits} = transform_digits(Place0, Digits0), + R = insert_decimal(Place, Digits), + case Float < 0 of + true -> + [$- | R]; + _ -> + R + end. + +%% @spec frexp(F::float()) -> {Frac::float(), Exp::float()} +%% @doc Return the fractional and exponent part of an IEEE 754 double, +%% equivalent to the libc function of the same name. +%% F = Frac * pow(2, Exp). +frexp(F) -> + frexp1(unpack(F)). + +%% @spec int_pow(X::integer(), N::integer()) -> Y::integer() +%% @doc Moderately efficient way to exponentiate integers. +%% int_pow(10, 2) = 100. +int_pow(_X, 0) -> + 1; +int_pow(X, N) when N > 0 -> + int_pow(X, N, 1). + +%% @spec int_ceil(F::float()) -> integer() +%% @doc Return the ceiling of F as an integer. The ceiling is defined as +%% F when F == trunc(F); +%% trunc(F) when F < 0; +%% trunc(F) + 1 when F > 0. +int_ceil(X) -> + T = trunc(X), + case (X - T) of + Pos when Pos > 0 -> T + 1; + _ -> T + end. + + +%% Internal API + +int_pow(X, N, R) when N < 2 -> + R * X; +int_pow(X, N, R) -> + int_pow(X * X, N bsr 1, case N band 1 of 1 -> R * X; 0 -> R end). + +insert_decimal(0, S) -> + "0." ++ S; +insert_decimal(Place, S) when Place > 0 -> + L = length(S), + case Place - L of + 0 -> + S ++ ".0"; + N when N < 0 -> + {S0, S1} = lists:split(L + N, S), + S0 ++ "." 
++ S1; + N when N < 6 -> + %% More places than digits + S ++ lists:duplicate(N, $0) ++ ".0"; + _ -> + insert_decimal_exp(Place, S) + end; +insert_decimal(Place, S) when Place > -6 -> + "0." ++ lists:duplicate(abs(Place), $0) ++ S; +insert_decimal(Place, S) -> + insert_decimal_exp(Place, S). + +insert_decimal_exp(Place, S) -> + [C | S0] = S, + S1 = case S0 of + [] -> + "0"; + _ -> + S0 + end, + Exp = case Place < 0 of + true -> + "e-"; + false -> + "e+" + end, + [C] ++ "." ++ S1 ++ Exp ++ integer_to_list(abs(Place - 1)). + + +digits1(Float, Exp, Frac) -> + Round = ((Frac band 1) =:= 0), + case Exp >= 0 of + true -> + BExp = 1 bsl Exp, + case (Frac =/= ?BIG_POW) of + true -> + scale((Frac * BExp * 2), 2, BExp, BExp, + Round, Round, Float); + false -> + scale((Frac * BExp * 4), 4, (BExp * 2), BExp, + Round, Round, Float) + end; + false -> + case (Exp =:= ?MIN_EXP) orelse (Frac =/= ?BIG_POW) of + true -> + scale((Frac * 2), 1 bsl (1 - Exp), 1, 1, + Round, Round, Float); + false -> + scale((Frac * 4), 1 bsl (2 - Exp), 2, 1, + Round, Round, Float) + end + end. + +scale(R, S, MPlus, MMinus, LowOk, HighOk, Float) -> + Est = int_ceil(math:log10(abs(Float)) - 1.0e-10), + %% Note that the scheme implementation uses a 326 element look-up table + %% for int_pow(10, N) where we do not. + case Est >= 0 of + true -> + fixup(R, S * int_pow(10, Est), MPlus, MMinus, Est, + LowOk, HighOk); + false -> + Scale = int_pow(10, -Est), + fixup(R * Scale, S, MPlus * Scale, MMinus * Scale, Est, + LowOk, HighOk) + end. + +fixup(R, S, MPlus, MMinus, K, LowOk, HighOk) -> + TooLow = case HighOk of + true -> + (R + MPlus) >= S; + false -> + (R + MPlus) > S + end, + case TooLow of + true -> + [(K + 1) | generate(R, S, MPlus, MMinus, LowOk, HighOk)]; + false -> + [K | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, HighOk)] + end. 
+ +generate(R0, S, MPlus, MMinus, LowOk, HighOk) -> + D = R0 div S, + R = R0 rem S, + TC1 = case LowOk of + true -> + R =< MMinus; + false -> + R < MMinus + end, + TC2 = case HighOk of + true -> + (R + MPlus) >= S; + false -> + (R + MPlus) > S + end, + case TC1 of + false -> + case TC2 of + false -> + [D | generate(R * 10, S, MPlus * 10, MMinus * 10, + LowOk, HighOk)]; + true -> + [D + 1] + end; + true -> + case TC2 of + false -> + [D]; + true -> + case R * 2 < S of + true -> + [D]; + false -> + [D + 1] + end + end + end. + +unpack(Float) -> + <> = <>, + {Sign, Exp, Frac}. + +frexp1({_Sign, 0, 0}) -> + {0.0, 0}; +frexp1({Sign, 0, Frac}) -> + Exp = log2floor(Frac), + <> = <>, + {Frac1, -(?FLOAT_BIAS) - 52 + Exp}; +frexp1({Sign, Exp, Frac}) -> + <> = <>, + {Frac1, Exp - ?FLOAT_BIAS}. + +log2floor(Int) -> + log2floor(Int, 0). + +log2floor(0, N) -> + N; +log2floor(Int, N) -> + log2floor(Int bsr 1, 1 + N). + + +transform_digits(Place, [0 | Rest]) -> + transform_digits(Place, Rest); +transform_digits(Place, Digits) -> + {Place, [$0 + D || D <- Digits]}. + + +frexp_int(F) -> + case unpack(F) of + {_Sign, 0, Frac} -> + {Frac, ?MIN_EXP}; + {_Sign, Exp, Frac} -> + {Frac + (1 bsl 52), Exp - 53 - ?FLOAT_BIAS} + end. + +%% +%% Tests +%% +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +int_ceil_test() -> + ?assertEqual(1, int_ceil(0.0001)), + ?assertEqual(0, int_ceil(0.0)), + ?assertEqual(1, int_ceil(0.99)), + ?assertEqual(1, int_ceil(1.0)), + ?assertEqual(-1, int_ceil(-1.5)), + ?assertEqual(-2, int_ceil(-2.0)), + ok. + +int_pow_test() -> + ?assertEqual(1, int_pow(1, 1)), + ?assertEqual(1, int_pow(1, 0)), + ?assertEqual(1, int_pow(10, 0)), + ?assertEqual(10, int_pow(10, 1)), + ?assertEqual(100, int_pow(10, 2)), + ?assertEqual(1000, int_pow(10, 3)), + ok. 
+ +digits_test() -> + ?assertEqual("0", + digits(0)), + ?assertEqual("0.0", + digits(0.0)), + ?assertEqual("1.0", + digits(1.0)), + ?assertEqual("-1.0", + digits(-1.0)), + ?assertEqual("0.1", + digits(0.1)), + ?assertEqual("0.01", + digits(0.01)), + ?assertEqual("0.001", + digits(0.001)), + ?assertEqual("1.0e+6", + digits(1000000.0)), + ?assertEqual("0.5", + digits(0.5)), + ?assertEqual("4503599627370496.0", + digits(4503599627370496.0)), + %% small denormalized number + %% 4.94065645841246544177e-324 =:= 5.0e-324 + <> = <<0,0,0,0,0,0,0,1>>, + ?assertEqual("5.0e-324", + digits(SmallDenorm)), + ?assertEqual(SmallDenorm, + list_to_float(digits(SmallDenorm))), + %% large denormalized number + %% 2.22507385850720088902e-308 + <> = <<0,15,255,255,255,255,255,255>>, + ?assertEqual("2.225073858507201e-308", + digits(BigDenorm)), + ?assertEqual(BigDenorm, + list_to_float(digits(BigDenorm))), + %% small normalized number + %% 2.22507385850720138309e-308 + <> = <<0,16,0,0,0,0,0,0>>, + ?assertEqual("2.2250738585072014e-308", + digits(SmallNorm)), + ?assertEqual(SmallNorm, + list_to_float(digits(SmallNorm))), + %% large normalized number + %% 1.79769313486231570815e+308 + <> = <<127,239,255,255,255,255,255,255>>, + ?assertEqual("1.7976931348623157e+308", + digits(LargeNorm)), + ?assertEqual(LargeNorm, + list_to_float(digits(LargeNorm))), + %% issue #10 - mochinum:frexp(math:pow(2, -1074)). + ?assertEqual("5.0e-324", + digits(math:pow(2, -1074))), + ok. 
+ +frexp_test() -> + %% zero + ?assertEqual({0.0, 0}, frexp(0.0)), + %% one + ?assertEqual({0.5, 1}, frexp(1.0)), + %% negative one + ?assertEqual({-0.5, 1}, frexp(-1.0)), + %% small denormalized number + %% 4.94065645841246544177e-324 + <> = <<0,0,0,0,0,0,0,1>>, + ?assertEqual({0.5, -1073}, frexp(SmallDenorm)), + %% large denormalized number + %% 2.22507385850720088902e-308 + <> = <<0,15,255,255,255,255,255,255>>, + ?assertEqual( + {0.99999999999999978, -1022}, + frexp(BigDenorm)), + %% small normalized number + %% 2.22507385850720138309e-308 + <> = <<0,16,0,0,0,0,0,0>>, + ?assertEqual({0.5, -1021}, frexp(SmallNorm)), + %% large normalized number + %% 1.79769313486231570815e+308 + <> = <<127,239,255,255,255,255,255,255>>, + ?assertEqual( + {0.99999999999999989, 1024}, + frexp(LargeNorm)), + %% issue #10 - mochinum:frexp(math:pow(2, -1074)). + ?assertEqual( + {0.5, -1073}, + frexp(math:pow(2, -1074))), + ok. + +-endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochitemp.erl b/rabbitmq-server/deps/mochiweb/src/mochitemp.erl similarity index 85% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochitemp.erl rename to rabbitmq-server/deps/mochiweb/src/mochitemp.erl index f64876d..bd3c965 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochitemp.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochitemp.erl @@ -1,7 +1,25 @@ %% @author Bob Ippolito %% @copyright 2010 Mochi Media, Inc. 
+%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. -%% @doc Create temporary files and directories. +%% @doc Create temporary files and directories. Requires crypto to be started. -module(mochitemp). -export([gettempdir/0]). @@ -87,7 +105,7 @@ rngchars(N) -> [rngchar() | rngchars(N - 1)]. rngchar() -> - rngchar(mochiweb_util:rand_uniform(0, tuple_size(?SAFE_CHARS))). + rngchar(crypto:rand_uniform(0, tuple_size(?SAFE_CHARS))). rngchar(C) -> element(1 + C, ?SAFE_CHARS). @@ -177,6 +195,7 @@ gettempdir_cwd_test() -> ok. rngchars_test() -> + crypto:start(), ?assertEqual( "", rngchars(0)), @@ -198,6 +217,7 @@ rngchar_test() -> ok. mkdtemp_n_failonce_test() -> + crypto:start(), D = mkdtemp(), Path = filename:join([D, "testdir"]), %% Toggle the existence of a dir so that it fails @@ -244,6 +264,7 @@ make_dir_fail_test() -> ok. mkdtemp_test() -> + crypto:start(), D = mkdtemp(), ?assertEqual( true, @@ -254,6 +275,7 @@ mkdtemp_test() -> ok. 
rmtempdir_test() -> + crypto:start(), D1 = mkdtemp(), ?assertEqual( true, diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiutf8.erl b/rabbitmq-server/deps/mochiweb/src/mochiutf8.erl similarity index 84% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiutf8.erl rename to rabbitmq-server/deps/mochiweb/src/mochiutf8.erl index c9d2751..bf0e7cc 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiutf8.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiutf8.erl @@ -1,5 +1,23 @@ %% @copyright 2010 Mochi Media, Inc. %% @author Bob Ippolito +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Algorithm to convert any binary to a valid UTF-8 sequence by ignoring %% invalid bytes. @@ -11,11 +29,11 @@ %% External API -%% -type unichar_low() :: 0..16#d7ff. -%% -type unichar_high() :: 16#e000..16#10ffff. -%% -type unichar() :: unichar_low() | unichar_high(). 
+-type unichar_low() :: 0..16#d7ff. +-type unichar_high() :: 16#e000..16#10ffff. +-type unichar() :: unichar_low() | unichar_high(). -%% -spec codepoint_to_bytes(unichar()) -> binary(). +-spec codepoint_to_bytes(unichar()) -> binary(). %% @doc Convert a unicode codepoint to UTF-8 bytes. codepoint_to_bytes(C) when (C >= 16#00 andalso C =< 16#7f) -> %% U+0000 - U+007F - 7 bits @@ -40,12 +58,12 @@ codepoint_to_bytes(C) when (C >= 16#010000 andalso C =< 16#10FFFF) -> 2#10:2, B1:6, 2#10:2, B0:6>>. -%% -spec codepoints_to_bytes([unichar()]) -> binary(). +-spec codepoints_to_bytes([unichar()]) -> binary(). %% @doc Convert a list of codepoints to a UTF-8 binary. codepoints_to_bytes(L) -> <<<<(codepoint_to_bytes(C))/binary>> || C <- L>>. -%% -spec read_codepoint(binary()) -> {unichar(), binary(), binary()}. +-spec read_codepoint(binary()) -> {unichar(), binary(), binary()}. read_codepoint(Bin = <<2#0:1, C:7, Rest/binary>>) -> %% U+0000 - U+007F - 7 bits <<B:1/binary, _/binary>> = Bin, @@ -82,32 +100,32 @@ read_codepoint(Bin = <<2#11110:5, B3:3, {C, B, Rest} end. -%% -spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _. +-spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _. codepoint_foldl(F, Acc, <<>>) when is_function(F, 2) -> Acc; codepoint_foldl(F, Acc, Bin) -> {C, _, Rest} = read_codepoint(Bin), codepoint_foldl(F, F(C, Acc), Rest). -%% -spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _. +-spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _. bytes_foldl(F, Acc, <<>>) when is_function(F, 2) -> Acc; bytes_foldl(F, Acc, Bin) -> {_, B, Rest} = read_codepoint(Bin), bytes_foldl(F, F(B, Acc), Rest). -%% -spec bytes_to_codepoints(binary()) -> [unichar()]. +-spec bytes_to_codepoints(binary()) -> [unichar()]. bytes_to_codepoints(B) -> lists:reverse(codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], B)). -%% -spec len(binary()) -> non_neg_integer(). +-spec len(binary()) -> non_neg_integer(). 
len(<<>>) -> 0; len(B) -> {_, _, Rest} = read_codepoint(B), 1 + len(Rest). -%% -spec valid_utf8_bytes(B::binary()) -> binary(). +-spec valid_utf8_bytes(B::binary()) -> binary(). %% @doc Return only the bytes in B that represent valid UTF-8. Uses %% the following recursive algorithm: skip one byte if B does not %% follow UTF-8 syntax (a 1-4 byte encoding of some number), @@ -118,7 +136,7 @@ valid_utf8_bytes(B) when is_binary(B) -> %% Internal API -%% -spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary(). +-spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary(). %% @doc Return B, but skipping the 0-based indexes in L. binary_skip_bytes(B, []) -> B; @@ -126,7 +144,7 @@ binary_skip_bytes(B, L) -> binary_skip_bytes(B, L, 0, []). %% @private -%% -spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary(). +-spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary(). binary_skip_bytes(B, [], _N, Acc) -> iolist_to_binary(lists:reverse([B | Acc])); binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) -> @@ -134,13 +152,13 @@ binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) -> binary_skip_bytes(<<C, RestB/binary>>, L, N, Acc) -> binary_skip_bytes(RestB, L, 1 + N, [C | Acc]). -%% -spec invalid_utf8_indexes(binary()) -> [non_neg_integer()]. +-spec invalid_utf8_indexes(binary()) -> [non_neg_integer()]. %% @doc Return the 0-based indexes in B that are not valid UTF-8. invalid_utf8_indexes(B) -> invalid_utf8_indexes(B, 0, []). %% @private. -%% -spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()]. +-spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()]. 
invalid_utf8_indexes(<<C, Rest/binary>>, N, Acc) when C < 16#80 -> %% U+0000 - U+007F - 7 bits invalid_utf8_indexes(Rest, 1 + N, Acc); diff --git a/rabbitmq-server/deps/mochiweb/src/mochiweb.app.src b/rabbitmq-server/deps/mochiweb/src/mochiweb.app.src new file mode 100644 index 0000000..f20c719 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb.app.src @@ -0,0 +1,8 @@ +{application,mochiweb, + [{description,"MochiMedia Web Server"}, + {vsn,"2.13.0"}, + {modules,[]}, + {registered,[]}, + {env,[]}, + {applications,[kernel,stdlib,crypto,inets,ssl,xmerl,compiler, + syntax_tools]}]}. diff --git a/rabbitmq-server/deps/mochiweb/src/mochiweb.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb.erl new file mode 100644 index 0000000..14480c2 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb.erl @@ -0,0 +1,101 @@ +%% @author Bob Ippolito +%% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. 
+ +%% @doc Start and stop the MochiWeb server. + +-module(mochiweb). +-author('bob@mochimedia.com'). + +-export([new_request/1, new_response/1]). +-export([all_loaded/0, all_loaded/1, reload/0]). +-export([ensure_started/1]). + +reload() -> + [c:l(Module) || Module <- all_loaded()]. + +all_loaded() -> + all_loaded(filename:dirname(code:which(?MODULE))). + +all_loaded(Base) when is_atom(Base) -> + []; +all_loaded(Base) -> + FullBase = Base ++ "/", + F = fun ({_Module, Loaded}, Acc) when is_atom(Loaded) -> + Acc; + ({Module, Loaded}, Acc) -> + case lists:prefix(FullBase, Loaded) of + true -> + [Module | Acc]; + false -> + Acc + end + end, + lists:foldl(F, [], code:all_loaded()). + +%% See the erlang:decode_packet/3 docs for the full type +-spec uri(HttpUri :: term()) -> string(). +uri({abs_path, Uri}) -> + Uri; +%% TODO: +%% This makes it hard to implement certain kinds of proxies with mochiweb, +%% perhaps a field could be added to the mochiweb_request record to preserve +%% this information in raw_path. +uri({absoluteURI, _Protocol, _Host, _Port, Uri}) -> + Uri; +%% From http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2 +uri('*') -> + "*"; +%% Erlang decode_packet will return this for requests like `CONNECT host:port` +uri({scheme, Hostname, Port}) -> + Hostname ++ ":" ++ Port; +uri(HttpString) when is_list(HttpString) -> + HttpString. + +%% @spec new_request( {Socket, Request, Headers} +%% | {Socket, Opts, Request, Headers} ) -> MochiWebRequest +%% @doc Return a mochiweb_request data structure. +new_request({Socket, {Method, HttpUri, Version}, Headers}) -> + new_request({Socket, [], {Method, HttpUri, Version}, Headers}); + +new_request({Socket, Opts, {Method, HttpUri, Version}, Headers}) -> + mochiweb_request:new(Socket, + Opts, + Method, + uri(HttpUri), + Version, + mochiweb_headers:make(Headers)). + +%% @spec new_response({Request, integer(), Headers}) -> MochiWebResponse +%% @doc Return a mochiweb_response data structure. 
+new_response({Request, Code, Headers}) -> + mochiweb_response:new(Request, + Code, + mochiweb_headers:make(Headers)). + +%% @spec ensure_started(App::atom()) -> ok +%% @doc Start the given App if it has not been started already. +ensure_started(App) -> + case application:start(App) of + ok -> + ok; + {error, {already_started, App}} -> + ok + end. diff --git a/rabbitmq-server/deps/mochiweb/src/mochiweb_acceptor.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_acceptor.erl new file mode 100644 index 0000000..44ce91f --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_acceptor.erl @@ -0,0 +1,83 @@ +%% @author Bob Ippolito +%% @copyright 2010 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. + +%% @doc MochiWeb acceptor. + +-module(mochiweb_acceptor). +-author('bob@mochimedia.com'). + +-include("internal.hrl"). + +-export([start_link/3, start_link/4, init/4]). + +-define(EMFILE_SLEEP_MSEC, 100). 
+ +start_link(Server, Listen, Loop) -> + start_link(Server, Listen, Loop, []). + +start_link(Server, Listen, Loop, Opts) -> + proc_lib:spawn_link(?MODULE, init, [Server, Listen, Loop, Opts]). + +do_accept(Server, Listen) -> + T1 = os:timestamp(), + case mochiweb_socket:transport_accept(Listen) of + {ok, Socket} -> + gen_server:cast(Server, {accepted, self(), timer:now_diff(os:timestamp(), T1)}), + mochiweb_socket:finish_accept(Socket); + Other -> + Other + end. + +init(Server, Listen, Loop, Opts) -> + case catch do_accept(Server, Listen) of + {ok, Socket} -> + call_loop(Loop, Socket, Opts); + {error, Err} when Err =:= closed orelse + Err =:= esslaccept orelse + Err =:= timeout -> + exit(normal); + Other -> + %% Mitigate out of file descriptor scenario by sleeping for a + %% short time to slow error rate + case Other of + {error, emfile} -> + receive + after ?EMFILE_SLEEP_MSEC -> + ok + end; + _ -> + ok + end, + error_logger:error_report( + [{application, mochiweb}, + "Accept failed error", + lists:flatten(io_lib:format("~p", [Other]))]), + exit({error, accept_failed}) + end. + +call_loop({M, F}, Socket, Opts) -> + M:F(Socket, Opts); +call_loop({M, F, [A1]}, Socket, Opts) -> + M:F(Socket, Opts, A1); +call_loop({M, F, A}, Socket, Opts) -> + erlang:apply(M, F, [Socket, Opts | A]); +call_loop(Loop, Socket, Opts) -> + Loop(Socket, Opts). diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_base64url.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_base64url.erl similarity index 71% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_base64url.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_base64url.erl index ab5aaec..e6a8e13 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_base64url.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_base64url.erl @@ -1,5 +1,27 @@ +%% @author Bob Ippolito +%% @copyright 2013 Mochi Media, Inc. 
+%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. + -module(mochiweb_base64url). -export([encode/1, decode/1]). + %% @doc URL and filename safe base64 variant with no padding, %% also known as "base64url" per RFC 4648. %% @@ -8,13 +30,13 @@ %% '_' is used in place of '/' (63), %% padding is implicit rather than explicit ('='). --spec encode(iolist()) -> binary(). +-spec encode(iolist() | binary()) -> binary(). encode(B) when is_binary(B) -> encode_binary(B); encode(L) when is_list(L) -> encode_binary(iolist_to_binary(L)). --spec decode(iolist()) -> binary(). +-spec decode(iolist() | binary()) -> binary(). 
decode(B) when is_binary(B) -> decode_binary(B); decode(L) when is_list(L) -> diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_charref.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_charref.erl similarity index 98% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_charref.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_charref.erl index 665d0f9..143452e 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_charref.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_charref.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Converts HTML 5 charrefs and entities to codepoints (or lists of code points). -module(mochiweb_charref). @@ -11,7 +29,7 @@ %% codepoint, or return undefined on failure. %% The input should not include an ampersand or semicolon. 
%% charref("#38") = 38, charref("#x26") = 38, charref("amp") = 38. -%% -spec charref(binary() | string()) -> integer() | [integer()] | undefined. +-spec charref(binary() | string()) -> integer() | [integer()] | undefined. charref(B) when is_binary(B) -> charref(binary_to_list(B)); charref([$#, C | L]) when C =:= $x orelse C =:= $X -> diff --git a/rabbitmq-server/deps/mochiweb/src/mochiweb_clock.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_clock.erl new file mode 100644 index 0000000..4f101c5 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_clock.erl @@ -0,0 +1,101 @@ +%% Copyright (c) 2011-2014, Loïc Hoguin +%% Copyright (c) 2015, Robert Kowalski +%% +%% Permission to use, copy, modify, and/or distribute this software for any +%% purpose with or without fee is hereby granted, provided that the above +%% copyright notice and this permission notice appear in all copies. +%% +%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +%% While a gen_server process runs in the background to update +%% the cache of formatted dates every second, all API calls are +%% local and directly read from the ETS cache table, providing +%% fast time and date computations. + +-module(mochiweb_clock). + +-behaviour(gen_server). + +%% API. +-export([start_link/0]). +-export([start/0]). +-export([stop/0]). +-export([rfc1123/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, {}). 
+ +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). + +-spec start() -> {ok, pid()}. +start() -> + gen_server:start({local, ?MODULE}, ?MODULE, [], []). + +-spec stop() -> stopped. +stop() -> + gen_server:call(?MODULE, stop). + +-spec rfc1123() -> string(). +rfc1123() -> + case ets:lookup(?MODULE, rfc1123) of + [{rfc1123, Date}] -> + Date; + [] -> + "" + end. + +%% gen_server. + +-spec init([]) -> {ok, #state{}}. +init([]) -> + ?MODULE = ets:new(?MODULE, [named_table, protected, {read_concurrency, true}]), + handle_info(update_date, #state{}), + timer:send_interval(1000, update_date), + {ok, #state{}}. + +-type from() :: {pid(), term()}. +-spec handle_call + (stop, from(), State) -> {stop, normal, stopped, State} + when State::#state{}. +handle_call(stop, _From, State) -> + {stop, normal, stopped, State}; +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +-spec handle_cast(_, State) -> {noreply, State} when State::#state{}. +handle_cast(_Msg, State) -> + {noreply, State}. + +-spec handle_info(any(), State) -> {noreply, State} when State::#state{}. +handle_info(update_date, State) -> + Date = httpd_util:rfc1123_date(), + ets:insert(?MODULE, {rfc1123, Date}), + {noreply, State}; +handle_info(_Info, State) -> + {noreply, State}. + +-spec terminate(_, _) -> ok. +terminate(_Reason, _State) -> + ok. + +-spec code_change(_, State, _) -> {ok, State} when State::#state{}. +code_change(_OldVsn, State, _Extra) -> + {ok, State}. 
+ diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cookies.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_cookies.erl similarity index 90% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cookies.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_cookies.erl index 1cc4e91..9539041 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cookies.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_cookies.erl @@ -1,5 +1,23 @@ %% @author Emad El-Haraty %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc HTTP Cookie parsing and generating (RFC 2109, RFC 2965). 
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cover.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_cover.erl similarity index 64% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cover.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_cover.erl index aa075d5..ebc2c18 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_cover.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_cover.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2010 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Workarounds for various cover deficiencies. -module(mochiweb_cover). 
diff --git a/rabbitmq-server/deps/mochiweb/src/mochiweb_echo.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_echo.erl new file mode 100644 index 0000000..b14505c --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_echo.erl @@ -0,0 +1,59 @@ +%% @author Bob Ippolito +%% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. + +%% @doc Simple and stupid echo server to demo mochiweb_socket_server. + +-module(mochiweb_echo). +-author('bob@mochimedia.com'). +-export([start/0, stop/0, loop/1]). + +stop() -> + mochiweb_socket_server:stop(?MODULE). + +start() -> + mochiweb_socket_server:start([{link, false} | options()]). + +options() -> + [{name, ?MODULE}, + {port, 6789}, + {ip, "127.0.0.1"}, + {max, 1}, + {loop, {?MODULE, loop}}]. 
+ +loop(Socket) -> + case mochiweb_socket:recv(Socket, 0, 30000) of + {ok, Data} -> + case mochiweb_socket:send(Socket, Data) of + ok -> + loop(Socket); + _ -> + exit(normal) + end; + _Other -> + exit(normal) + end. + +%% +%% Tests +%% +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). +-endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_headers.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_headers.erl similarity index 93% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_headers.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_headers.erl index b49cf9e..457758f 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_headers.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_headers.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. 
%% @doc Case preserving (but case insensitive) HTTP Header dictionary. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_html.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_html.erl similarity index 91% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_html.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_html.erl index 3732924..3c5c4f9 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_html.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_html.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Loosely tokenizes and generates parse trees for HTML 4. -module(mochiweb_html). 
@@ -621,13 +639,42 @@ find_gt(Bin, S=#decoder{offset=O}, HasSlash) -> tokenize_charref(Bin, S=#decoder{offset=O}) -> try - tokenize_charref(Bin, S, O) + case tokenize_charref_raw(Bin, S, O) of + {C1, S1=#decoder{offset=O1}} when C1 >= 16#D800 andalso C1 =< 16#DFFF -> + %% Surrogate pair + tokeninize_charref_surrogate_pair(Bin, S1, C1); + {Unichar, S1} when is_integer(Unichar) -> + {{data, mochiutf8:codepoint_to_bytes(Unichar), false}, + S1}; + {Unichars, S1} when is_list(Unichars) -> + {{data, unicode:characters_to_binary(Unichars), false}, + S1} + end catch throw:invalid_charref -> {{data, <<"&">>, false}, S} end. -tokenize_charref(Bin, S=#decoder{offset=O}, Start) -> +tokeninize_charref_surrogate_pair(Bin, S=#decoder{offset=O}, C1) -> + case Bin of + <<_:O/binary, $&, _/binary>> -> + case tokenize_charref_raw(Bin, ?INC_COL(S), O + 1) of + {C2, S1} when C2 >= 16#D800 andalso C1 =< 16#DFFF -> + {{data, + unicode:characters_to_binary( + <<C1:16, C2:16>>, + utf16, + utf8), + false}, + S1}; + _ -> + throw(invalid_charref) + end; + _ -> + throw(invalid_charref) + end. + +tokenize_charref_raw(Bin, S=#decoder{offset=O}, Start) -> case Bin of <<_:O/binary>> -> throw(invalid_charref); @@ -640,17 +687,9 @@ tokenize_charref(Bin, S=#decoder{offset=O}, Start) -> <<_:O/binary, $;, _/binary>> -> Len = O - Start, <<_:Start/binary, Raw:Len/binary, _/binary>> = Bin, - Data = case mochiweb_charref:charref(Raw) of - undefined -> - throw(invalid_charref); - Unichar when is_integer(Unichar) -> - mochiutf8:codepoint_to_bytes(Unichar); - Unichars when is_list(Unichars) -> - unicode:characters_to_binary(Unichars) - end, - {{data, Data, false}, ?INC_COL(S)}; + {mochiweb_charref:charref(Raw), ?INC_COL(S)}; _ -> - tokenize_charref(Bin, ?INC_COL(S), Start) + tokenize_charref_raw(Bin, ?INC_COL(S), Start) end. 
tokenize_doctype(Bin, S) -> diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_http.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_http.erl similarity index 61% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_http.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_http.erl index ae6410f..568019f 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_http.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_http.erl @@ -1,12 +1,30 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc HTTP server. -module(mochiweb_http). -author('bob@mochimedia.com'). -export([start/1, start_link/1, stop/0, stop/1]). --export([loop/2]). +-export([loop/3]). -export([after_response/2, reentry/1]). -export([parse_range_request/1, range_skip_length/2]). 
@@ -17,6 +35,12 @@ -define(DEFAULTS, [{name, ?MODULE}, {port, 8888}]). +-ifdef(gen_tcp_r15b_workaround). +r15b_workaround() -> true. +-else. +r15b_workaround() -> false. +-endif. + parse_options(Options) -> {loop, HttpLoop} = proplists:lookup(loop, Options), Loop = {?MODULE, loop, [HttpLoop]}, @@ -34,51 +58,48 @@ stop(Name) -> %% Option = {name, atom()} | {ip, string() | tuple()} | {backlog, integer()} %% | {nodelay, boolean()} | {acceptor_pool_size, integer()} %% | {ssl, boolean()} | {profile_fun, undefined | (Props) -> ok} -%% | {link, false} +%% | {link, false} | {recbuf, undefined | non_negative_integer()} %% @doc Start a mochiweb server. %% profile_fun is used to profile accept timing. %% After each accept, if defined, profile_fun is called with a proplist of a subset of the mochiweb_socket_server state and timing information. %% The proplist is as follows: [{name, Name}, {port, Port}, {active_sockets, ActiveSockets}, {timing, Timing}]. %% @end start(Options) -> + ok = ensure_started(mochiweb_clock), mochiweb_socket_server:start(parse_options(Options)). start_link(Options) -> + ok = ensure_started(mochiweb_clock), mochiweb_socket_server:start_link(parse_options(Options)). -loop(Socket, Body) -> - ok = mochiweb_socket:setopts(Socket, [{packet, http}]), - request(Socket, Body). +ensure_started(M) -> + case M:start() of + {ok, _Pid} -> + ok; + {error, {already_started, _Pid}} -> + ok + end. --ifdef(gen_tcp_r15b_workaround). --define(R15B_GEN_TCP_FIX, {tcp_error,_,emsgsize} -> - % R15B02 returns this then closes the socket, so close and exit - mochiweb_socket:close(Socket), - exit(normal); - ). --else. --define(R15B_GEN_TCP_FIX,). --endif. +loop(Socket, Opts, Body) -> + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, http}])), + request(Socket, Opts, Body). 
-request(Socket, Body) -> - ok = mochiweb_socket:setopts(Socket, [{active, once}]), +request(Socket, Opts, Body) -> + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{active, once}])), receive {Protocol, _, {http_request, Method, Path, Version}} when Protocol == http orelse Protocol == ssl -> - ok = mochiweb_socket:setopts(Socket, [{packet, httph}]), - headers(Socket, {Method, Path, Version}, [], Body, 0); + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, httph}])), + headers(Socket, Opts, {Method, Path, Version}, [], Body, 0); {Protocol, _, {http_error, "\r\n"}} when Protocol == http orelse Protocol == ssl -> - request(Socket, Body); + request(Socket, Opts, Body); {Protocol, _, {http_error, "\n"}} when Protocol == http orelse Protocol == ssl -> - request(Socket, Body); + request(Socket, Opts, Body); {tcp_closed, _} -> mochiweb_socket:close(Socket), exit(normal); {ssl_closed, _} -> mochiweb_socket:close(Socket), - exit(normal); - ?R15B_GEN_TCP_FIX - _Other -> - handle_invalid_request(Socket) + exit(normal) after ?REQUEST_RECV_TIMEOUT -> mochiweb_socket:close(Socket), exit(normal) @@ -89,26 +110,25 @@ reentry(Body) -> ?MODULE:after_response(Body, Req) end. -headers(Socket, Request, Headers, _Body, ?MAX_HEADERS) -> +headers(Socket, Opts, Request, Headers, _Body, ?MAX_HEADERS) -> %% Too many headers sent, bad request. 
- ok = mochiweb_socket:setopts(Socket, [{packet, raw}]), - handle_invalid_request(Socket, Request, Headers); -headers(Socket, Request, Headers, Body, HeaderCount) -> - ok = mochiweb_socket:setopts(Socket, [{active, once}]), + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])), + handle_invalid_request(Socket, Opts, Request, Headers); +headers(Socket, Opts, Request, Headers, Body, HeaderCount) -> + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{active, once}])), receive {Protocol, _, http_eoh} when Protocol == http orelse Protocol == ssl -> - Req = new_request(Socket, Request, Headers), + Req = new_request(Socket, Opts, Request, Headers), call_body(Body, Req), ?MODULE:after_response(Body, Req); {Protocol, _, {http_header, _, Name, _, Value}} when Protocol == http orelse Protocol == ssl -> - headers(Socket, Request, [{Name, Value} | Headers], Body, + headers(Socket, Opts, Request, [{Name, Value} | Headers], Body, 1 + HeaderCount); {tcp_closed, _} -> mochiweb_socket:close(Socket), exit(normal); - ?R15B_GEN_TCP_FIX - _Other -> - handle_invalid_request(Socket, Request, Headers) + {tcp_error, _, emsgsize} = Other -> + handle_invalid_msg_request(Other, Socket, Opts, Request, Headers) after ?HEADERS_RECV_TIMEOUT -> mochiweb_socket:close(Socket), exit(normal) @@ -121,21 +141,27 @@ call_body({M, F}, Req) -> call_body(Body, Req) -> Body(Req). -%% -spec handle_invalid_request(term()) -> no_return(). -handle_invalid_request(Socket) -> - handle_invalid_request(Socket, {'GET', {abs_path, "/"}, {0,9}}, []), - exit(normal). +-spec handle_invalid_msg_request(term(), term(), term(), term(), term()) -> no_return(). 
+handle_invalid_msg_request(Msg, Socket, Opts, Request, RevHeaders) -> + case {Msg, r15b_workaround()} of + {{tcp_error,_,emsgsize}, true} -> + %% R15B02 returns this then closes the socket, so close and exit + mochiweb_socket:close(Socket), + exit(normal); + _ -> + handle_invalid_request(Socket, Opts, Request, RevHeaders) + end. -%% -spec handle_invalid_request(term(), term(), term()) -> no_return(). -handle_invalid_request(Socket, Request, RevHeaders) -> - Req = new_request(Socket, Request, RevHeaders), +-spec handle_invalid_request(term(), term(), term(), term()) -> no_return(). +handle_invalid_request(Socket, Opts, Request, RevHeaders) -> + Req = new_request(Socket, Opts, Request, RevHeaders), Req:respond({400, [], []}), mochiweb_socket:close(Socket), exit(normal). -new_request(Socket, Request, RevHeaders) -> - ok = mochiweb_socket:setopts(Socket, [{packet, raw}]), - mochiweb:new_request({Socket, Request, lists:reverse(RevHeaders)}). +new_request(Socket, Opts, Request, RevHeaders) -> + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])), + mochiweb:new_request({Socket, Opts, Request, lists:reverse(RevHeaders)}). after_response(Body, Req) -> Socket = Req:get(socket), @@ -146,15 +172,14 @@ after_response(Body, Req) -> false -> Req:cleanup(), erlang:garbage_collect(), - ?MODULE:loop(Socket, Body) + ?MODULE:loop(Socket, mochiweb_request:get(opts, Req), Body) end. 
-parse_range_request("bytes=0-") -> - undefined; parse_range_request(RawRange) when is_list(RawRange) -> try "bytes=" ++ RangeString = RawRange, - Ranges = string:tokens(RangeString, ","), + RangeTokens = [string:strip(R) || R <- string:tokens(RangeString, ",")], + Ranges = [R || R <- RangeTokens, string:len(R) > 0], lists:map(fun ("-" ++ V) -> {none, list_to_integer(V)}; (R) -> @@ -181,11 +206,9 @@ range_skip_length(Spec, Size) -> {R, Size - R}; {_OutOfRange, none} -> invalid_range; - {Start, End} when 0 =< Start, Start =< End, End < Size -> - {Start, End - Start + 1}; - {Start, End} when 0 =< Start, Start =< End, End >= Size -> - {Start, Size - Start}; - {_OutOfRange, _End} -> + {Start, End} when Start >= 0, Start < Size, Start =< End -> + {Start, erlang:min(End + 1, Size) - Start}; + {_InvalidStart, _InvalidEnd} -> invalid_range end. @@ -202,7 +225,7 @@ range_test() -> ?assertEqual([{none, 20}], parse_range_request("bytes=-20")), %% trivial single range - ?assertEqual(undefined, parse_range_request("bytes=0-")), + ?assertEqual([{0, none}], parse_range_request("bytes=0-")), %% invalid, single ranges ?assertEqual(fail, parse_range_request("")), @@ -217,6 +240,19 @@ range_test() -> [{20, none}, {50, 100}, {none, 200}], parse_range_request("bytes=20-,50-100,-200")), + %% valid, multiple range with whitespace + ?assertEqual( + [{20, 30}, {50, 100}, {110, 200}], + parse_range_request("bytes=20-30, 50-100 , 110-200")), + + %% valid, multiple range with extra commas + ?assertEqual( + [{20, 30}, {50, 100}, {110, 200}], + parse_range_request("bytes=20-30,,50-100,110-200")), + ?assertEqual( + [{20, 30}, {50, 100}, {110, 200}], + parse_range_request("bytes=20-30, ,50-100,,,110-200")), + %% no ranges ?assertEqual([], parse_range_request("bytes=")), ok. 
@@ -236,6 +272,7 @@ range_skip_length_test() -> ?assertEqual({BodySize, 0}, range_skip_length({none, 0}, BodySize)), ?assertEqual({0, BodySize}, range_skip_length({none, BodySize}, BodySize)), ?assertEqual({0, BodySize}, range_skip_length({0, none}, BodySize)), + ?assertEqual({0, BodySize}, range_skip_length({0, BodySize + 1}, BodySize)), BodySizeLess1 = BodySize - 1, ?assertEqual({BodySizeLess1, 1}, range_skip_length({BodySize - 1, none}, BodySize)), @@ -263,6 +300,8 @@ range_skip_length_test() -> range_skip_length({-1, none}, BodySize)), ?assertEqual(invalid_range, range_skip_length({BodySize, none}, BodySize)), + ?assertEqual(invalid_range, + range_skip_length({BodySize + 1, BodySize + 5}, BodySize)), ok. -endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_io.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_io.erl similarity index 50% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_io.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_io.erl index 8454b43..15b6b3a 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_io.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_io.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. 
+%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Utilities for dealing with IO devices (open files). diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_mime.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_mime.erl similarity index 90% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_mime.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_mime.erl index 7d9f249..949d957 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_mime.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_mime.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Gives a good MIME type guess based on file extension. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_multipart.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_multipart.erl similarity index 96% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_multipart.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_multipart.erl index a4857d6..1d18ae2 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_multipart.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_multipart.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Utilities for parsing multipart/form-data. @@ -38,7 +56,7 @@ parts_to_body([{Start, End, Body}], ContentType, Size) -> {HeaderList, Body}; parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) -> parts_to_multipart_body(BodyList, ContentType, Size, - mochihex:to_hex(mochiweb_util:rand_bytes(8))). + mochihex:to_hex(crypto:rand_bytes(8))). %% @spec parts_to_multipart_body([bodypart()], ContentType::string(), %% Size::integer(), Boundary::string()) -> @@ -374,7 +392,7 @@ parse3(Transport) -> body_end, eof], TestCallback = fun (Next) -> test_callback(Next, Expect) end, - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, BinContent), exit(normal) end, @@ -410,7 +428,7 @@ parse2(Transport) -> body_end, eof], TestCallback = fun (Next) -> test_callback(Next, Expect) end, - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, BinContent), exit(normal) end, @@ -447,7 +465,7 @@ do_parse_form(Transport) -> "--AaB03x--", ""], "\r\n"), BinContent = iolist_to_binary(Content), - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, BinContent), exit(normal) end, @@ -500,7 +518,7 @@ do_parse(Transport) -> body_end, eof], TestCallback = fun (Next) -> test_callback(Next, Expect) end, - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, BinContent), exit(normal) end, @@ -552,7 +570,7 @@ parse_partial_body_boundary(Transport) -> body_end, eof], TestCallback = fun (Next) -> test_callback(Next, Expect) end, - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, 
BinContent), exit(normal) end, @@ -605,7 +623,7 @@ parse_large_header(Transport) -> body_end, eof], TestCallback = fun (Next) -> test_callback(Next, Expect) end, - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, BinContent), exit(normal) end, @@ -681,7 +699,7 @@ flash_parse(Transport) -> body_end, eof], TestCallback = fun (Next) -> test_callback(Next, Expect) end, - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, BinContent), exit(normal) end, @@ -729,7 +747,7 @@ flash_parse2(Transport) -> body_end, eof], TestCallback = fun (Next) -> test_callback(Next, Expect) end, - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, BinContent), exit(normal) end, @@ -856,7 +874,7 @@ multipart_parsing_benchmark() -> body_end, eof], TestCallback = fun (Next) -> test_callback(Next, Expect) end, - ServerFun = fun (Socket) -> + ServerFun = fun (Socket, _Opts) -> ok = mochiweb_socket:send(Socket, BinContent), exit(normal) end, diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_request.erl similarity index 76% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_request.erl index d967bdb..39890ce 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_request.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. 
+%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc MochiWeb HTTP Request abstraction. @@ -11,9 +29,9 @@ -define(QUIP, "Any of you quaids got a smint?"). --export([new/5]). +-export([new/5, new/6]). -export([get_header_value/2, get_primary_header_value/2, get_combined_header_value/2, get/2, dump/1]). --export([send/2, recv/2, recv/3, recv_body/1, recv_body/2, stream_body/4]). +-export([send/2, recv/2, recv/3, recv_body/1, recv_body/2, stream_body/4, stream_body/5]). -export([start_response/2, start_response_length/2, start_raw_response/2]). -export([respond/2, ok/2]). -export([not_found/1, not_found/2]). @@ -44,22 +62,27 @@ -define(IDLE_TIMEOUT, 300000). % Maximum recv_body() length of 1MB --define(MAX_RECV_BODY, 104857600). +-define(MAX_RECV_BODY, (1024*1024)). %% @spec new(Socket, Method, RawPath, Version, headers()) -> request() %% @doc Create a new request instance. 
new(Socket, Method, RawPath, Version, Headers) -> - {?MODULE, [Socket, Method, RawPath, Version, Headers]}. + new(Socket, [], Method, RawPath, Version, Headers). + +%% @spec new(Socket, Opts, Method, RawPath, Version, headers()) -> request() +%% @doc Create a new request instance. +new(Socket, Opts, Method, RawPath, Version, Headers) -> + {?MODULE, [Socket, Opts, Method, RawPath, Version, Headers]}. %% @spec get_header_value(K, request()) -> undefined | Value %% @doc Get the value of a given request header. -get_header_value(K, {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}) -> +get_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) -> mochiweb_headers:get_value(K, Headers). -get_primary_header_value(K, {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}) -> +get_primary_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) -> mochiweb_headers:get_primary_value(K, Headers). -get_combined_header_value(K, {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}) -> +get_combined_header_value(K, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) -> mochiweb_headers:get_combined_value(K, Headers). %% @type field() = socket | scheme | method | raw_path | version | headers | peer | path | body_length | range @@ -70,24 +93,24 @@ get_combined_header_value(K, {?MODULE, [_Socket, _Method, _RawPath, _Version, He %% an ssl socket will be returned as {ssl, SslSocket}. %% You can use SslSocket with the ssl %% application, eg: ssl:peercert(SslSocket). 
-get(socket, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> +get(socket, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> Socket; -get(scheme, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> +get(scheme, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> case mochiweb_socket:type(Socket) of plain -> http; ssl -> https end; -get(method, {?MODULE, [_Socket, Method, _RawPath, _Version, _Headers]}) -> +get(method, {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}) -> Method; -get(raw_path, {?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) -> +get(raw_path, {?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) -> RawPath; -get(version, {?MODULE, [_Socket, _Method, _RawPath, Version, _Headers]}) -> +get(version, {?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}) -> Version; -get(headers, {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}) -> +get(headers, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}) -> Headers; -get(peer, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +get(peer, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case mochiweb_socket:peername(Socket) of {ok, {Addr={10, _, _, _}, _Port}} -> case get_header_value("x-forwarded-for", THIS) of @@ -108,7 +131,7 @@ get(peer, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> {error, enotconn} -> exit(normal) end; -get(path, {?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) -> +get(path, {?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) -> case erlang:get(?SAVE_PATH) of undefined -> {Path0, _, _} = mochiweb_util:urlsplit_path(RawPath), @@ -118,7 +141,7 @@ get(path, {?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) -> Cached -> Cached end; -get(body_length, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +get(body_length, {?MODULE, [_Socket, 
_Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case erlang:get(?SAVE_BODY_LENGTH) of undefined -> BodyLength = body_length(THIS), @@ -127,26 +150,29 @@ get(body_length, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THI {cached, Cached} -> Cached end; -get(range, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +get(range, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case get_header_value(range, THIS) of undefined -> undefined; RawRange -> mochiweb_http:parse_range_request(RawRange) - end. + end; +get(opts, {?MODULE, [_Socket, Opts, _Method, _RawPath, _Version, _Headers]}) -> + Opts. %% @spec dump(request()) -> {mochiweb_request, [{atom(), term()}]} %% @doc Dump the internal representation to a "human readable" set of terms %% for debugging/inspection purposes. -dump({?MODULE, [_Socket, Method, RawPath, Version, Headers]}) -> +dump({?MODULE, [_Socket, Opts, Method, RawPath, Version, Headers]}) -> {?MODULE, [{method, Method}, {version, Version}, {raw_path, RawPath}, + {opts, Opts}, {headers, mochiweb_headers:to_list(Headers)}]}. %% @spec send(iodata(), request()) -> ok %% @doc Send data over the socket. -send(Data, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> +send(Data, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> case mochiweb_socket:send(Socket, Data) of ok -> ok; @@ -157,13 +183,13 @@ send(Data, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> %% @spec recv(integer(), request()) -> binary() %% @doc Receive Length bytes from the client as a binary, with the default %% idle timeout. -recv(Length, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +recv(Length, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> recv(Length, ?IDLE_TIMEOUT, THIS). %% @spec recv(integer(), integer(), request()) -> binary() %% @doc Receive Length bytes from the client as a binary, with the given %% Timeout in msec. 
-recv(Length, Timeout, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> +recv(Length, Timeout, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> case mochiweb_socket:recv(Socket, Length, Timeout) of {ok, Data} -> put(?SAVE_RECV, true), @@ -174,7 +200,7 @@ recv(Length, Timeout, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]} %% @spec body_length(request()) -> undefined | chunked | unknown_transfer_encoding | integer() %% @doc Infer body length from transfer-encoding and content-length headers. -body_length({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +body_length({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case get_header_value("transfer-encoding", THIS) of undefined -> case get_combined_header_value("content-length", THIS) of @@ -193,13 +219,13 @@ body_length({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> %% @spec recv_body(request()) -> binary() %% @doc Receive the body of the HTTP request (defined by Content-Length). %% Will only receive up to the default max-body length of 1MB. -recv_body({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +recv_body({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> recv_body(?MAX_RECV_BODY, THIS). %% @spec recv_body(integer(), request()) -> binary() %% @doc Receive the body of the HTTP request (defined by Content-Length). %% Will receive up to MaxBody bytes. -recv_body(MaxBody, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +recv_body(MaxBody, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case erlang:get(?SAVE_BODY) of undefined -> % we could use a sane constant for max chunk size @@ -219,11 +245,11 @@ recv_body(MaxBody, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=T Cached -> Cached end. 
-stream_body(MaxChunkSize, ChunkFun, FunState, {?MODULE,[_Socket,_Method,_RawPath,_Version,_Headers]}=THIS) -> +stream_body(MaxChunkSize, ChunkFun, FunState, {?MODULE,[_Socket,_Opts,_Method,_RawPath,_Version,_Headers]}=THIS) -> stream_body(MaxChunkSize, ChunkFun, FunState, undefined, THIS). stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength, - {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> + {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> Expect = case get_header_value("expect", THIS) of undefined -> undefined; @@ -254,7 +280,7 @@ stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength, MaxBodyLength when is_integer(MaxBodyLength), MaxBodyLength < Length -> exit({body_too_large, content_length}); _ -> - stream_unchunked_body(Length, ChunkFun, FunState, THIS) + stream_unchunked_body(MaxChunkSize,Length, ChunkFun, FunState, THIS) end end. @@ -263,23 +289,16 @@ stream_body(MaxChunkSize, ChunkFun, FunState, MaxBodyLength, %% @doc Start the HTTP response by sending the Code HTTP response and %% ResponseHeaders. The server will set header defaults such as Server %% and Date if not present in ResponseHeaders. -start_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> - HResponse = mochiweb_headers:make(ResponseHeaders), - HResponse1 = mochiweb_headers:default_from_list(server_headers(), - HResponse), - start_raw_response({Code, HResponse1}, THIS). +start_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> + start_raw_response({Code, ResponseHeaders}, THIS). %% @spec start_raw_response({integer(), headers()}, request()) -> response() %% @doc Start the HTTP response by sending the Code HTTP response and %% ResponseHeaders. 
-start_raw_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Method, _RawPath, Version, _Headers]}=THIS) -> - F = fun ({K, V}, Acc) -> - [mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">> | Acc] - end, - End = lists:foldl(F, [<<"\r\n">>], - mochiweb_headers:to_list(ResponseHeaders)), - send([make_version(Version), make_code(Code), <<"\r\n">> | End], THIS), - mochiweb:new_response({THIS, Code, ResponseHeaders}). +start_raw_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> + {Header, Response} = format_response_header({Code, ResponseHeaders}, THIS), + send(Header, THIS), + Response. %% @spec start_response_length({integer(), ioheaders(), integer()}, request()) -> response() @@ -288,18 +307,42 @@ start_raw_response({Code, ResponseHeaders}, {?MODULE, [_Socket, _Method, _RawPat %% will set header defaults such as Server %% and Date if not present in ResponseHeaders. start_response_length({Code, ResponseHeaders, Length}, - {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> + {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> HResponse = mochiweb_headers:make(ResponseHeaders), HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse), start_response({Code, HResponse1}, THIS). +%% @spec format_response_header({integer(), ioheaders()} | {integer(), ioheaders(), integer()}, request()) -> iolist() +%% @doc Format the HTTP response header, including the Code HTTP response and +%% ResponseHeaders including an optional Content-Length of Length. The server +%% will set header defaults such as Server +%% and Date if not present in ResponseHeaders. 
+format_response_header({Code, ResponseHeaders}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}=THIS) -> + HResponse = mochiweb_headers:make(ResponseHeaders), + HResponse1 = mochiweb_headers:default_from_list(server_headers(), HResponse), + HResponse2 = case should_close(THIS) of + true -> + mochiweb_headers:enter("Connection", "close", HResponse1); + false -> + HResponse1 + end, + End = [[mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">>] + || {K, V} <- mochiweb_headers:to_list(HResponse2)], + Response = mochiweb:new_response({THIS, Code, HResponse2}), + {[make_version(Version), make_code(Code), <<"\r\n">> | [End, <<"\r\n">>]], Response}; +format_response_header({Code, ResponseHeaders, Length}, + {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> + HResponse = mochiweb_headers:make(ResponseHeaders), + HResponse1 = mochiweb_headers:enter("Content-Length", Length, HResponse), + format_response_header({Code, HResponse1}, THIS). + %% @spec respond({integer(), ioheaders(), iodata() | chunked | {file, IoDevice}}, request()) -> response() %% @doc Start the HTTP response with start_response, and send Body to the %% client (if the get(method) /= 'HEAD'). The Content-Length header %% will be set by the Body length, and the server will insert header %% defaults. 
respond({Code, ResponseHeaders, {file, IoDevice}}, - {?MODULE, [_Socket, Method, _RawPath, _Version, _Headers]}=THIS) -> + {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}=THIS) -> Length = mochiweb_io:iodevice_size(IoDevice), Response = start_response_length({Code, ResponseHeaders, Length}, THIS), case Method of @@ -311,7 +354,7 @@ respond({Code, ResponseHeaders, {file, IoDevice}}, IoDevice) end, Response; -respond({Code, ResponseHeaders, chunked}, {?MODULE, [_Socket, Method, _RawPath, Version, _Headers]}=THIS) -> +respond({Code, ResponseHeaders, chunked}, {?MODULE, [_Socket, _Opts, Method, _RawPath, Version, _Headers]}=THIS) -> HResponse = mochiweb_headers:make(ResponseHeaders), HResponse1 = case Method of 'HEAD' -> @@ -333,34 +376,32 @@ respond({Code, ResponseHeaders, chunked}, {?MODULE, [_Socket, Method, _RawPath, HResponse end, start_response({Code, HResponse1}, THIS); -respond({Code, ResponseHeaders, Body}, {?MODULE, [_Socket, Method, _RawPath, _Version, _Headers]}=THIS) -> - Response = start_response_length({Code, ResponseHeaders, iolist_size(Body)}, THIS), +respond({Code, ResponseHeaders, Body}, {?MODULE, [_Socket, _Opts, Method, _RawPath, _Version, _Headers]}=THIS) -> + {Header, Response} = format_response_header({Code, ResponseHeaders, iolist_size(Body)}, THIS), case Method of - 'HEAD' -> - ok; - _ -> - send(Body, THIS) + 'HEAD' -> send(Header, THIS); + _ -> send([Header, Body], THIS) end, Response. %% @spec not_found(request()) -> response() %% @doc Alias for not_found([]). -not_found({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +not_found({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> not_found([], THIS). %% @spec not_found(ExtraHeaders, request()) -> response() %% @doc Alias for respond({404, [{"Content-Type", "text/plain"} %% | ExtraHeaders], <<"Not found.">>}). 
-not_found(ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +not_found(ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> respond({404, [{"Content-Type", "text/plain"} | ExtraHeaders], <<"Not found.">>}, THIS). %% @spec ok({value(), iodata()} | {value(), ioheaders(), iodata() | {file, IoDevice}}, request()) -> %% response() %% @doc respond({200, [{"Content-Type", ContentType} | Headers], Body}). -ok({ContentType, Body}, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +ok({ContentType, Body}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> ok({ContentType, [], Body}, THIS); -ok({ContentType, ResponseHeaders, Body}, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +ok({ContentType, ResponseHeaders, Body}, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> HResponse = mochiweb_headers:make(ResponseHeaders), case THIS:get(range) of X when (X =:= undefined orelse X =:= fail) orelse Body =:= chunked -> @@ -393,7 +434,7 @@ ok({ContentType, ResponseHeaders, Body}, {?MODULE, [_Socket, _Method, _RawPath, %% @spec should_close(request()) -> bool() %% @doc Return true if the connection must be closed. If false, using %% Keep-Alive should be safe. -should_close({?MODULE, [_Socket, _Method, _RawPath, Version, _Headers]}=THIS) -> +should_close({?MODULE, [_Socket, _Opts, _Method, _RawPath, Version, _Headers]}=THIS) -> ForceClose = erlang:get(?SAVE_FORCE_CLOSE) =/= undefined, DidNotRecv = erlang:get(?SAVE_RECV) =:= undefined, ForceClose orelse Version < {1, 0} @@ -419,7 +460,7 @@ is_close(_) -> %% @spec cleanup(request()) -> ok %% @doc Clean up any junk in the process dictionary, required before continuing %% a Keep-Alive request. 
-cleanup({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}) -> +cleanup({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> L = [?SAVE_QS, ?SAVE_PATH, ?SAVE_RECV, ?SAVE_BODY, ?SAVE_BODY_LENGTH, ?SAVE_POST, ?SAVE_COOKIE, ?SAVE_FORCE_CLOSE], lists:foreach(fun(K) -> @@ -429,7 +470,7 @@ cleanup({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}) -> %% @spec parse_qs(request()) -> [{Key::string(), Value::string()}] %% @doc Parse the query string of the URL. -parse_qs({?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) -> +parse_qs({?MODULE, [_Socket, _Opts, _Method, RawPath, _Version, _Headers]}) -> case erlang:get(?SAVE_QS) of undefined -> {_, QueryString, _} = mochiweb_util:urlsplit_path(RawPath), @@ -442,12 +483,12 @@ parse_qs({?MODULE, [_Socket, _Method, RawPath, _Version, _Headers]}) -> %% @spec get_cookie_value(Key::string, request()) -> string() | undefined %% @doc Get the value of the given cookie. -get_cookie_value(Key, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +get_cookie_value(Key, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> proplists:get_value(Key, parse_cookie(THIS)). %% @spec parse_cookie(request()) -> [{Key::string(), Value::string()}] %% @doc Parse the cookie header. -parse_cookie({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +parse_cookie({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case erlang:get(?SAVE_COOKIE) of undefined -> Cookies = case get_header_value("cookie", THIS) of @@ -465,7 +506,7 @@ parse_cookie({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) - %% @spec parse_post(request()) -> [{Key::string(), Value::string()}] %% @doc Parse an application/x-www-form-urlencoded form POST. This %% has the side-effect of calling recv_body(). 
-parse_post({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +parse_post({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case erlang:get(?SAVE_POST) of undefined -> Parsed = case recv_body(THIS) of @@ -489,7 +530,7 @@ parse_post({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> %% @doc The function is called for each chunk. %% Used internally by read_chunked_body. stream_chunked_body(MaxChunkSize, Fun, FunState, - {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> + {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case read_chunk_length(THIS) of 0 -> Fun({0, read_chunk(0, THIS)}, FunState); @@ -501,27 +542,28 @@ stream_chunked_body(MaxChunkSize, Fun, FunState, stream_chunked_body(MaxChunkSize, Fun, NewState, THIS) end. -stream_unchunked_body(0, Fun, FunState, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}) -> +stream_unchunked_body(_MaxChunkSize, 0, Fun, FunState, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> Fun({0, <<>>}, FunState); -stream_unchunked_body(Length, Fun, FunState, - {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > 0 -> - PktSize = case Length > ?RECBUF_SIZE of - true -> - ?RECBUF_SIZE; - false -> - Length +stream_unchunked_body(MaxChunkSize, Length, Fun, FunState, + {?MODULE, [_Socket, Opts, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > 0 -> + RecBuf = case mochilists:get_value(recbuf, Opts, ?RECBUF_SIZE) of + undefined -> %os controlled buffer size + MaxChunkSize; + Val -> + Val end, + PktSize=min(Length,RecBuf), Bin = recv(PktSize, THIS), NewState = Fun({PktSize, Bin}, FunState), - stream_unchunked_body(Length - PktSize, Fun, NewState, THIS). + stream_unchunked_body(MaxChunkSize, Length - PktSize, Fun, NewState, THIS). %% @spec read_chunk_length(request()) -> integer() %% @doc Read the length of the next HTTP chunk. 
-read_chunk_length({?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> - ok = mochiweb_socket:setopts(Socket, [{packet, line}]), +read_chunk_length({?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, line}])), case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of {ok, Header} -> - ok = mochiweb_socket:setopts(Socket, [{packet, raw}]), + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])), Splitter = fun (C) -> C =/= $\r andalso C =/= $\n andalso C =/= $ end, @@ -534,8 +576,8 @@ read_chunk_length({?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> %% @spec read_chunk(integer(), request()) -> Chunk::binary() | [Footer::binary()] %% @doc Read in a HTTP chunk of the given length. If Length is 0, then read the %% HTTP footers (as a list of binaries, since they're nominal). -read_chunk(0, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> - ok = mochiweb_socket:setopts(Socket, [{packet, line}]), +read_chunk(0, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, line}])), F = fun (F1, Acc) -> case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of {ok, <<"\r\n">>} -> @@ -547,10 +589,10 @@ read_chunk(0, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> end end, Footers = F(F, []), - ok = mochiweb_socket:setopts(Socket, [{packet, raw}]), + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, raw}])), put(?SAVE_RECV, true), Footers; -read_chunk(Length, {?MODULE, [Socket, _Method, _RawPath, _Version, _Headers]}) -> +read_chunk(Length, {?MODULE, [Socket, _Opts, _Method, _RawPath, _Version, _Headers]}) -> case mochiweb_socket:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of {ok, <>} -> Chunk; @@ -559,23 +601,23 @@ read_chunk(Length, {?MODULE, [Socket, _Method, _RawPath, _Version, 
_Headers]}) - end. read_sub_chunks(Length, MaxChunkSize, Fun, FunState, - {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > MaxChunkSize -> + {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) when Length > MaxChunkSize -> Bin = recv(MaxChunkSize, THIS), NewState = Fun({size(Bin), Bin}, FunState), read_sub_chunks(Length - MaxChunkSize, MaxChunkSize, Fun, NewState, THIS); read_sub_chunks(Length, _MaxChunkSize, Fun, FunState, - {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> + {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> Fun({Length, read_chunk(Length, THIS)}, FunState). %% @spec serve_file(Path, DocRoot, request()) -> Response %% @doc Serve a file relative to DocRoot. -serve_file(Path, DocRoot, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +serve_file(Path, DocRoot, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> serve_file(Path, DocRoot, [], THIS). %% @spec serve_file(Path, DocRoot, ExtraHeaders, request()) -> Response %% @doc Serve a file relative to DocRoot. -serve_file(Path, DocRoot, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +serve_file(Path, DocRoot, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case mochiweb_util:safe_relative_path(Path) of undefined -> not_found(ExtraHeaders, THIS); @@ -595,11 +637,11 @@ serve_file(Path, DocRoot, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _ directory_index(FullPath) -> filename:join([FullPath, "index.html"]). 
-maybe_redirect([], FullPath, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +maybe_redirect([], FullPath, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> maybe_serve_file(directory_index(FullPath), ExtraHeaders, THIS); maybe_redirect(RelPath, FullPath, ExtraHeaders, - {?MODULE, [_Socket, _Method, _RawPath, _Version, Headers]}=THIS) -> + {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, Headers]}=THIS) -> case string:right(RelPath, 1) of "/" -> maybe_serve_file(directory_index(FullPath), ExtraHeaders, THIS); @@ -620,7 +662,7 @@ maybe_redirect(RelPath, FullPath, ExtraHeaders, respond({301, MoreHeaders, Body}, THIS) end. -maybe_serve_file(File, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +maybe_serve_file(File, ExtraHeaders, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case file:read_file_info(File) of {ok, FileInfo} -> LastModified = httpd_util:rfc1123_date(FileInfo#file_info.mtime), @@ -647,7 +689,7 @@ maybe_serve_file(File, ExtraHeaders, {?MODULE, [_Socket, _Method, _RawPath, _Ver server_headers() -> [{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"}, - {"Date", httpd_util:rfc1123_date()}]. + {"Date", mochiweb_clock:rfc1123()}]. 
make_code(X) when is_integer(X) -> [integer_to_list(X), [" " | httpd_util:reason_phrase(X)]]; @@ -672,7 +714,12 @@ range_parts({file, IoDevice}, Ranges) -> LocNums = lists:foldr(F, [], Ranges), {ok, Data} = file:pread(IoDevice, LocNums), Bodies = lists:zipwith(fun ({Skip, Length}, PartialBody) -> - {Skip, Skip + Length - 1, PartialBody} + case Length of + 0 -> + {Skip, Skip, <<>>}; + _ -> + {Skip, Skip + Length - 1, PartialBody} + end end, LocNums, Data), {Bodies, Size}; @@ -714,7 +761,7 @@ range_parts(Body0, Ranges) -> %% accepted_encodings(["gzip", "deflate", "identity"]) -> %% ["deflate", "gzip", "identity"] %% -accepted_encodings(SupportedEncodings, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +accepted_encodings(SupportedEncodings, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> AcceptEncodingHeader = case get_header_value("Accept-Encoding", THIS) of undefined -> ""; @@ -752,7 +799,7 @@ accepted_encodings(SupportedEncodings, {?MODULE, [_Socket, _Method, _RawPath, _V %% 5) For an "Accept" header with value "text/*; q=0.0, */*": %% accepts_content_type("text/plain") -> false %% -accepts_content_type(ContentType1, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +accepts_content_type(ContentType1, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> ContentType = re:replace(ContentType1, "\\s", "", [global, {return, list}]), AcceptHeader = accept_header(THIS), case mochiweb_util:parse_qvalues(AcceptHeader) of @@ -801,7 +848,7 @@ accepts_content_type(ContentType1, {?MODULE, [_Socket, _Method, _RawPath, _Versi %% accepts_content_types(["application/json", "text/html"]) -> %% ["text/html", "application/json"] %% -accepted_content_types(Types1, {?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +accepted_content_types(Types1, {?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> Types = lists:map( fun(T) -> re:replace(T, "\\s", "", 
[global, {return, list}]) end, Types1), @@ -841,7 +888,7 @@ accepted_content_types(Types1, {?MODULE, [_Socket, _Method, _RawPath, _Version, [Type || {_Q, Type} <- lists:sort(SortFun, TypesQ)] end. -accept_header({?MODULE, [_Socket, _Method, _RawPath, _Version, _Headers]}=THIS) -> +accept_header({?MODULE, [_Socket, _Opts, _Method, _RawPath, _Version, _Headers]}=THIS) -> case get_header_value("Accept", THIS) of undefined -> "*/*"; diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_response.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_response.erl similarity index 68% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_response.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_response.erl index 308a26b..195e652 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_response.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_response.erl @@ -1,5 +1,23 @@ %% @author Bob Ippolito %% @copyright 2007 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. %% @doc Response abstraction. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_session.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_session.erl similarity index 69% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_session.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_session.erl index ddf7c46..c9f88e2 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_session.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_session.erl @@ -1,4 +1,23 @@ %% @author Asier Azkuenaga Batiz +%% @copyright 2013 Mochi Media, Inc. +%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: +%% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. 
%% @doc HTTP Cookie session. Note that the expiration time travels unencrypted %% as far as this module is concerned. In order to achieve more security, @@ -21,11 +40,11 @@ %% @doc Generates a secure encrypted binary convining all the parameters. The %% expiration time must be a 32-bit integer. -%% -spec generate_session_data( -%% ExpirationTime :: expiration_time(), -%% Data :: iolist(), -%% FSessionKey :: key_fun(), -%% ServerKey :: iolist()) -> binary(). +-spec generate_session_data( + ExpirationTime :: expiration_time(), + Data :: iolist(), + FSessionKey :: key_fun(), + ServerKey :: iolist()) -> binary(). generate_session_data(ExpirationTime, Data, FSessionKey, ServerKey) when is_integer(ExpirationTime), is_function(FSessionKey)-> BData = ensure_binary(Data), @@ -39,11 +58,11 @@ generate_session_data(ExpirationTime, Data, FSessionKey, ServerKey) %% @doc Convenience wrapper for generate_session_data that returns a %% mochiweb cookie with "id" as the key, a max_age of 20000 seconds, %% and the current local time as local time. -%% -spec generate_session_cookie( -%% ExpirationTime :: expiration_time(), -%% Data :: iolist(), -%% FSessionKey :: key_fun(), -%% ServerKey :: iolist()) -> header(). +-spec generate_session_cookie( + ExpirationTime :: expiration_time(), + Data :: iolist(), + FSessionKey :: key_fun(), + ServerKey :: iolist()) -> header(). generate_session_cookie(ExpirationTime, Data, FSessionKey, ServerKey) when is_integer(ExpirationTime), is_function(FSessionKey)-> CookieData = generate_session_data(ExpirationTime, Data, @@ -55,13 +74,13 @@ generate_session_cookie(ExpirationTime, Data, FSessionKey, ServerKey) calendar:universal_time())}]). %% TODO: This return type is messy to express in the type system. -%% -spec check_session_cookie( - %% ECookie :: binary(), - %% ExpirationTime :: string(), - %% FSessionKey :: key_fun(), - %% ServerKey :: iolist()) -> - %% {Success :: boolean(), - %% ExpTimeAndData :: [integer() | binary()]}. 
+-spec check_session_cookie( + ECookie :: binary(), + ExpirationTime :: string(), + FSessionKey :: key_fun(), + ServerKey :: iolist()) -> + {Success :: boolean(), + ExpTimeAndData :: [integer() | binary()]}. check_session_cookie(ECookie, ExpirationTime, FSessionKey, ServerKey) when is_binary(ECookie), is_integer(ExpirationTime), is_function(FSessionKey) -> @@ -83,7 +102,7 @@ check_session_cookie(_ECookie, _ExpirationTime, _FSessionKey, _ServerKey) -> {false, []}. %% 'Constant' time =:= operator for binary, to mitigate timing attacks. -%% -spec eq(binary(), binary()) -> boolean(). +-spec eq(binary(), binary()) -> boolean(). eq(A, B) when is_binary(A) andalso is_binary(B) -> eq(A, B, 0). @@ -94,30 +113,51 @@ eq(<<>>, <<>>, 0) -> eq(_As, _Bs, _Acc) -> false. -%% -spec ensure_binary(iolist()) -> binary(). +-spec ensure_binary(iolist()) -> binary(). ensure_binary(B) when is_binary(B) -> B; ensure_binary(L) when is_list(L) -> iolist_to_binary(L). -%% -spec encrypt_data(binary(), binary()) -> binary(). +-ifdef(crypto_compatibility). +-spec encrypt_data(binary(), binary()) -> binary(). encrypt_data(Data, Key) -> IV = crypto:rand_bytes(16), Crypt = crypto:aes_cfb_128_encrypt(Key, IV, Data), <>. -%% -spec decrypt_data(binary(), binary()) -> binary(). +-spec decrypt_data(binary(), binary()) -> binary(). decrypt_data(<>, Key) -> crypto:aes_cfb_128_decrypt(Key, IV, Crypt). -%% -spec gen_key(iolist(), iolist()) -> binary(). +-spec gen_key(iolist(), iolist()) -> binary(). gen_key(ExpirationTime, ServerKey)-> crypto:md5_mac(ServerKey, [ExpirationTime]). -%% -spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary(). +-spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary(). gen_hmac(ExpirationTime, Data, SessionKey, Key) -> crypto:sha_mac(Key, [ExpirationTime, Data, SessionKey]). +-else. +-spec encrypt_data(binary(), binary()) -> binary(). +encrypt_data(Data, Key) -> + IV = crypto:rand_bytes(16), + Crypt = crypto:block_encrypt(aes_cfb128, Key, IV, Data), + <>. 
+ +-spec decrypt_data(binary(), binary()) -> binary(). +decrypt_data(<>, Key) -> + crypto:block_decrypt(aes_cfb128, Key, IV, Crypt). + +-spec gen_key(iolist(), iolist()) -> binary(). +gen_key(ExpirationTime, ServerKey)-> + crypto:hmac(md5, ServerKey, [ExpirationTime]). + +-spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary(). +gen_hmac(ExpirationTime, Data, SessionKey, Key) -> + crypto:hmac(sha, Key, [ExpirationTime, Data, SessionKey]). + +-endif. -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). diff --git a/rabbitmq-server/deps/mochiweb/src/mochiweb_socket.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_socket.erl new file mode 100644 index 0000000..1756b8e --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_socket.erl @@ -0,0 +1,148 @@ +%% @copyright 2010 Mochi Media, Inc. + +%% @doc MochiWeb socket - wrapper for plain and ssl sockets. + +-module(mochiweb_socket). + +-export([listen/4, + accept/1, transport_accept/1, finish_accept/1, + recv/3, send/2, close/1, port/1, peername/1, + setopts/2, getopts/2, type/1, exit_if_closed/1]). + +-define(ACCEPT_TIMEOUT, 2000). +-define(SSL_TIMEOUT, 10000). +-define(SSL_HANDSHAKE_TIMEOUT, 20000). + + +listen(Ssl, Port, Opts, SslOpts) -> + case Ssl of + true -> + Opts1 = add_unbroken_ciphers_default(Opts ++ SslOpts), + Opts2 = add_safe_protocol_versions(Opts1), + case ssl:listen(Port, Opts2) of + {ok, ListenSocket} -> + {ok, {ssl, ListenSocket}}; + {error, _} = Err -> + Err + end; + false -> + gen_tcp:listen(Port, Opts) + end. + +add_unbroken_ciphers_default(Opts) -> + Default = filter_unsecure_cipher_suites(ssl:cipher_suites()), + Ciphers = filter_broken_cipher_suites(proplists:get_value(ciphers, Opts, Default)), + [{ciphers, Ciphers} | proplists:delete(ciphers, Opts)]. 
+ +filter_broken_cipher_suites(Ciphers) -> + case proplists:get_value(ssl_app, ssl:versions()) of + "5.3" ++ _ -> + lists:filter(fun(Suite) -> + string:left(atom_to_list(element(1, Suite)), 4) =/= "ecdh" + end, Ciphers); + _ -> + Ciphers + end. + +filter_unsecure_cipher_suites(Ciphers) -> + lists:filter(fun + ({_,des_cbc,_}) -> false; + ({_,_,md5}) -> false; + (_) -> true + end, + Ciphers). + +add_safe_protocol_versions(Opts) -> + case proplists:is_defined(versions, Opts) of + true -> + Opts; + false -> + Versions = filter_unsafe_protcol_versions(proplists:get_value(available, ssl:versions())), + [{versions, Versions} | Opts] + end. + +filter_unsafe_protcol_versions(Versions) -> + lists:filter(fun + (sslv3) -> false; + (_) -> true + end, + Versions). + +%% Provided for backwards compatibility only +accept(ListenSocket) -> + case transport_accept(ListenSocket) of + {ok, Socket} -> + finish_accept(Socket); + {error, _} = Err -> + Err + end. + +transport_accept({ssl, ListenSocket}) -> + case ssl:transport_accept(ListenSocket, ?SSL_TIMEOUT) of + {ok, Socket} -> + {ok, {ssl, Socket}}; + {error, _} = Err -> + Err + end; +transport_accept(ListenSocket) -> + gen_tcp:accept(ListenSocket, ?ACCEPT_TIMEOUT). + +finish_accept({ssl, Socket}) -> + case ssl:ssl_accept(Socket, ?SSL_HANDSHAKE_TIMEOUT) of + ok -> + {ok, {ssl, Socket}}; + {error, _} = Err -> + Err + end; +finish_accept(Socket) -> + {ok, Socket}. + +recv({ssl, Socket}, Length, Timeout) -> + ssl:recv(Socket, Length, Timeout); +recv(Socket, Length, Timeout) -> + gen_tcp:recv(Socket, Length, Timeout). + +send({ssl, Socket}, Data) -> + ssl:send(Socket, Data); +send(Socket, Data) -> + gen_tcp:send(Socket, Data). + +close({ssl, Socket}) -> + ssl:close(Socket); +close(Socket) -> + gen_tcp:close(Socket). + +port({ssl, Socket}) -> + case ssl:sockname(Socket) of + {ok, {_, Port}} -> + {ok, Port}; + {error, _} = Err -> + Err + end; +port(Socket) -> + inet:port(Socket). 
+ +peername({ssl, Socket}) -> + ssl:peername(Socket); +peername(Socket) -> + inet:peername(Socket). + +setopts({ssl, Socket}, Opts) -> + ssl:setopts(Socket, Opts); +setopts(Socket, Opts) -> + inet:setopts(Socket, Opts). + +getopts({ssl, Socket}, Opts) -> + ssl:getopts(Socket, Opts); +getopts(Socket, Opts) -> + inet:getopts(Socket, Opts). + +type({ssl, _}) -> + ssl; +type(_) -> + plain. + +exit_if_closed({error, closed}) -> + exit(normal); +exit_if_closed(Res) -> + Res. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket_server.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_socket_server.erl similarity index 76% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket_server.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_socket_server.erl index a3d4da3..fd5e382 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket_server.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_socket_server.erl @@ -18,11 +18,11 @@ {port, loop, name=undefined, - %% NOTE: This is currently ignored. max=2048, ip=any, listen=null, nodelay=false, + recbuf=?RECBUF_SIZE, backlog=128, active_sockets=0, acceptor_pool_size=16, @@ -74,7 +74,16 @@ parse_options(State=#mochiweb_socket_server{}) -> parse_options(Options) -> parse_options(Options, #mochiweb_socket_server{}). 
-parse_options([], State) -> +parse_options([], State=#mochiweb_socket_server{acceptor_pool_size=PoolSize, + max=Max}) -> + case Max < PoolSize of + true -> + error_logger:info_report([{warning, "max is set lower than acceptor_pool_size"}, + {max, Max}, + {acceptor_pool_size, PoolSize}]); + false -> + ok + end, State; parse_options([{name, L} | Rest], State) when is_list(L) -> Name = {local, list_to_atom(L)}, @@ -108,13 +117,26 @@ parse_options([{backlog, Backlog} | Rest], State) -> parse_options(Rest, State#mochiweb_socket_server{backlog=Backlog}); parse_options([{nodelay, NoDelay} | Rest], State) -> parse_options(Rest, State#mochiweb_socket_server{nodelay=NoDelay}); +parse_options([{recbuf, RecBuf} | Rest], State) when is_integer(RecBuf) orelse + RecBuf == undefined -> + %% XXX: `recbuf' value which is passed to `gen_tcp' + %% and value reported by `inet:getopts(P, [recbuf])' may + %% differ. They depends on underlying OS. From linux mans: + %% + %% The kernel doubles this value (to allow space for + %% bookkeeping overhead) when it is set using setsockopt(2), + %% and this doubled value is returned by getsockopt(2). + %% + %% See: man 7 socket | grep SO_RCVBUF + %% + %% In case undefined is passed instead of the default buffer + %% size ?RECBUF_SIZE, no size is set and the OS can control it dynamically + parse_options(Rest, State#mochiweb_socket_server{recbuf=RecBuf}); parse_options([{acceptor_pool_size, Max} | Rest], State) -> MaxInt = ensure_int(Max), parse_options(Rest, State#mochiweb_socket_server{acceptor_pool_size=MaxInt}); parse_options([{max, Max} | Rest], State) -> - error_logger:info_report([{warning, "TODO: max is currently unsupported"}, - {max, Max}]), MaxInt = ensure_int(Max), parse_options(Rest, State#mochiweb_socket_server{max=MaxInt}); parse_options([{ssl, Ssl} | Rest], State) when is_boolean(Ssl) -> @@ -156,13 +178,14 @@ ipv6_supported() -> false end. 
-init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=NoDelay}) -> +init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, + nodelay=NoDelay, recbuf=RecBuf}) -> process_flag(trap_exit, true), + BaseOpts = [binary, {reuseaddr, true}, {packet, 0}, {backlog, Backlog}, - {recbuf, ?RECBUF_SIZE}, {exit_on_close, false}, {active, false}, {nodelay, NoDelay}], @@ -177,33 +200,41 @@ init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=No {_, _, _, _, _, _, _, _} -> % IPv6 [inet6, {ip, Ip} | BaseOpts] end, - listen(Port, Opts, State). - -new_acceptor_pool(Listen, - State=#mochiweb_socket_server{acceptor_pool=Pool, - acceptor_pool_size=Size, - loop=Loop}) -> - F = fun (_, S) -> - Pid = mochiweb_acceptor:start_link(self(), Listen, Loop), - sets:add_element(Pid, S) - end, - Pool1 = lists:foldl(F, Pool, lists:seq(1, Size)), - State#mochiweb_socket_server{acceptor_pool=Pool1}. + OptsBuf=case RecBuf of + undefined -> + Opts; + _ -> + [{recbuf, RecBuf}|Opts] + end, + listen(Port, OptsBuf, State). + +new_acceptor_pool(State=#mochiweb_socket_server{acceptor_pool_size=Size}) -> + lists:foldl(fun (_, S) -> new_acceptor(S) end, State, lists:seq(1, Size)). + +new_acceptor(State=#mochiweb_socket_server{acceptor_pool=Pool, + recbuf=RecBuf, + loop=Loop, + listen=Listen}) -> + LoopOpts = [{recbuf, RecBuf}], + Pid = mochiweb_acceptor:start_link(self(), Listen, Loop, LoopOpts), + State#mochiweb_socket_server{ + acceptor_pool=sets:add_element(Pid, Pool)}. listen(Port, Opts, State=#mochiweb_socket_server{ssl=Ssl, ssl_opts=SslOpts}) -> case mochiweb_socket:listen(Ssl, Port, Opts, SslOpts) of {ok, Listen} -> {ok, ListenPort} = mochiweb_socket:port(Listen), - {ok, new_acceptor_pool( - Listen, - State#mochiweb_socket_server{listen=Listen, - port=ListenPort})}; + {ok, new_acceptor_pool(State#mochiweb_socket_server{ + listen=Listen, + port=ListenPort})}; {error, Reason} -> {stop, Reason} end. 
do_get(port, #mochiweb_socket_server{port=Port}) -> Port; +do_get(waiting_acceptors, #mochiweb_socket_server{acceptor_pool=Pool}) -> + sets:size(Pool); do_get(active_sockets, #mochiweb_socket_server{active_sockets=ActiveSockets}) -> ActiveSockets. @@ -271,16 +302,31 @@ code_change(_OldVsn, State, _Extra) -> recycle_acceptor(Pid, State=#mochiweb_socket_server{ acceptor_pool=Pool, - listen=Listen, - loop=Loop, + acceptor_pool_size=PoolSize, + max=Max, active_sockets=ActiveSockets}) -> - case sets:is_element(Pid, Pool) of - true -> - Acceptor = mochiweb_acceptor:start_link(self(), Listen, Loop), - Pool1 = sets:add_element(Acceptor, sets:del_element(Pid, Pool)), - State#mochiweb_socket_server{acceptor_pool=Pool1}; - false -> - State#mochiweb_socket_server{active_sockets=ActiveSockets - 1} + %% A socket is considered to be active from immediately after it + %% has been accepted (see the {accepted, Pid, Timing} cast above). + %% This function will be called when an acceptor is transitioning + %% to an active socket, or when either type of Pid dies. An acceptor + %% Pid will always be in the acceptor_pool set, and an active socket + %% will be in that set during the transition but not afterwards. + Pool1 = sets:del_element(Pid, Pool), + NewSize = sets:size(Pool1), + ActiveSockets1 = case NewSize =:= sets:size(Pool) of + %% Pid has died and it is not in the acceptor set, + %% it must be an active socket. + true -> max(0, ActiveSockets - 1); + false -> ActiveSockets + end, + State1 = State#mochiweb_socket_server{ + acceptor_pool=Pool1, + active_sockets=ActiveSockets1}, + %% Spawn a new acceptor only if it will not overrun the maximum socket + %% count or the maximum pool size. + case NewSize + ActiveSockets1 < Max andalso NewSize < PoolSize of + true -> new_acceptor(State1); + false -> State1 end. 
handle_info(Msg, State) when ?is_old_state(State) -> diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_util.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_util.erl similarity index 97% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_util.erl rename to rabbitmq-server/deps/mochiweb/src/mochiweb_util.erl index a0bc2bc..c606767 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_util.erl +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_util.erl @@ -13,7 +13,7 @@ -export([record_to_proplist/2, record_to_proplist/3]). -export([safe_relative_path/1, partition/2]). -export([parse_qvalues/1, pick_accepted_encodings/3]). --export([make_io/1, rand_bytes/1, rand_uniform/2]). +-export([make_io/1]). -define(PERCENT, 37). % $\% -define(FULLSTOP, 46). % $\. @@ -357,11 +357,16 @@ urlsplit_query([C | Rest], Acc) -> %% @spec guess_mime(string()) -> string() %% @doc Guess the mime type of a file by the extension of its filename. guess_mime(File) -> - case mochiweb_mime:from_extension(filename:extension(File)) of - undefined -> - "text/plain"; - Mime -> - Mime + case filename:basename(File) of + "crossdomain.xml" -> + "text/x-cross-domain-policy"; + Name -> + case mochiweb_mime:from_extension(filename:extension(Name)) of + undefined -> + "text/plain"; + Mime -> + Mime + end end. %% @spec parse_header(string()) -> {Type, [{K, V}]} @@ -581,12 +586,6 @@ make_io(Integer) when is_integer(Integer) -> make_io(Io) when is_list(Io); is_binary(Io) -> Io. -rand_bytes(Count) -> - list_to_binary([rand_uniform(0, 16#FF + 1) || _ <- lists:seq(1, Count)]). - -rand_uniform(Lo, Hi) -> - random:uniform(Hi - Lo) + Lo - 1. - %% %% Tests %% @@ -692,12 +691,14 @@ parse_header_test() -> ok. 
guess_mime_test() -> - "text/plain" = guess_mime(""), - "text/plain" = guess_mime(".text"), - "application/zip" = guess_mime(".zip"), - "application/zip" = guess_mime("x.zip"), - "text/html" = guess_mime("x.html"), - "application/xhtml+xml" = guess_mime("x.xhtml"), + ?assertEqual("text/plain", guess_mime("")), + ?assertEqual("text/plain", guess_mime(".text")), + ?assertEqual("application/zip", guess_mime(".zip")), + ?assertEqual("application/zip", guess_mime("x.zip")), + ?assertEqual("text/html", guess_mime("x.html")), + ?assertEqual("application/xhtml+xml", guess_mime("x.xhtml")), + ?assertEqual("text/x-cross-domain-policy", guess_mime("crossdomain.xml")), + ?assertEqual("text/x-cross-domain-policy", guess_mime("www/crossdomain.xml")), ok. path_split_test() -> diff --git a/rabbitmq-server/deps/mochiweb/src/mochiweb_websocket.erl b/rabbitmq-server/deps/mochiweb/src/mochiweb_websocket.erl new file mode 100644 index 0000000..ceb6bd6 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/src/mochiweb_websocket.erl @@ -0,0 +1,281 @@ +-module(mochiweb_websocket). +-author('lukasz.lalik@zadane.pl'). + +%% The MIT License (MIT) + +%% Copyright (c) 2012 Zadane.pl sp. z o.o. + +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + +%% @doc Websockets module for Mochiweb. Based on Misultin websockets module. + +-export([loop/5, upgrade_connection/2, request/5]). +-export([send/3]). +-ifdef(TEST). +-compile(export_all). +-endif. + +loop(Socket, Body, State, WsVersion, ReplyChannel) -> + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, 0}, {active, once}])), + proc_lib:hibernate(?MODULE, request, + [Socket, Body, State, WsVersion, ReplyChannel]). + +request(Socket, Body, State, WsVersion, ReplyChannel) -> + receive + {tcp_closed, _} -> + mochiweb_socket:close(Socket), + exit(normal); + {ssl_closed, _} -> + mochiweb_socket:close(Socket), + exit(normal); + {tcp_error, _, _} -> + mochiweb_socket:close(Socket), + exit(normal); + {Proto, _, WsFrames} when Proto =:= tcp orelse Proto =:= ssl -> + case parse_frames(WsVersion, WsFrames, Socket) of + close -> + mochiweb_socket:close(Socket), + exit(normal); + error -> + mochiweb_socket:close(Socket), + exit(normal); + Payload -> + NewState = call_body(Body, Payload, State, ReplyChannel), + loop(Socket, Body, NewState, WsVersion, ReplyChannel) + end; + _ -> + mochiweb_socket:close(Socket), + exit(normal) + end. + +call_body({M, F, A}, Payload, State, ReplyChannel) -> + erlang:apply(M, F, [Payload, State, ReplyChannel | A]); +call_body({M, F}, Payload, State, ReplyChannel) -> + M:F(Payload, State, ReplyChannel); +call_body(Body, Payload, State, ReplyChannel) -> + Body(Payload, State, ReplyChannel). + +send(Socket, Payload, hybi) -> + Prefix = <<1:1, 0:3, 1:4, (payload_length(iolist_size(Payload)))/binary>>, + mochiweb_socket:send(Socket, [Prefix, Payload]); +send(Socket, Payload, hixie) -> + mochiweb_socket:send(Socket, [0, Payload, 255]). 
+ +upgrade_connection(Req, Body) -> + case make_handshake(Req) of + {Version, Response} -> + Req:respond(Response), + Socket = Req:get(socket), + ReplyChannel = fun (Payload) -> + ?MODULE:send(Socket, Payload, Version) + end, + Reentry = fun (State) -> + ?MODULE:loop(Socket, Body, State, Version, ReplyChannel) + end, + {Reentry, ReplyChannel}; + _ -> + mochiweb_socket:close(Req:get(socket)), + exit(normal) + end. + +make_handshake(Req) -> + SecKey = Req:get_header_value("sec-websocket-key"), + Sec1Key = Req:get_header_value("Sec-WebSocket-Key1"), + Sec2Key = Req:get_header_value("Sec-WebSocket-Key2"), + Origin = Req:get_header_value(origin), + if SecKey =/= undefined -> + hybi_handshake(SecKey); + Sec1Key =/= undefined andalso Sec2Key =/= undefined -> + Host = Req:get_header_value("Host"), + Path = Req:get(path), + Body = Req:recv(8), + Scheme = scheme(Req), + hixie_handshake(Scheme, Host, Path, Sec1Key, Sec2Key, Body, Origin); + true -> + error + end. + +hybi_handshake(SecKey) -> + BinKey = list_to_binary(SecKey), + Bin = <>, + Challenge = base64:encode(crypto:hash(sha, Bin)), + Response = {101, [{"Connection", "Upgrade"}, + {"Upgrade", "websocket"}, + {"Sec-Websocket-Accept", Challenge}], ""}, + {hybi, Response}. + +scheme(Req) -> + case mochiweb_request:get(scheme, Req) of + http -> + "ws://"; + https -> + "wss://" + end. + +hixie_handshake(Scheme, Host, Path, Key1, Key2, Body, Origin) -> + Ikey1 = [D || D <- Key1, $0 =< D, D =< $9], + Ikey2 = [D || D <- Key2, $0 =< D, D =< $9], + Blank1 = length([D || D <- Key1, D =:= 32]), + Blank2 = length([D || D <- Key2, D =:= 32]), + Part1 = erlang:list_to_integer(Ikey1) div Blank1, + Part2 = erlang:list_to_integer(Ikey2) div Blank2, + Ckey = <>, + Challenge = erlang:md5(Ckey), + Location = lists:concat([Scheme, Host, Path]), + Response = {101, [{"Upgrade", "WebSocket"}, + {"Connection", "Upgrade"}, + {"Sec-WebSocket-Origin", Origin}, + {"Sec-WebSocket-Location", Location}], + Challenge}, + {hixie, Response}. 
+ +parse_frames(hybi, Frames, Socket) -> + try parse_hybi_frames(Socket, Frames, []) of + Parsed -> process_frames(Parsed, []) + catch + _:_ -> error + end; +parse_frames(hixie, Frames, _Socket) -> + try parse_hixie_frames(Frames, []) of + Payload -> Payload + catch + _:_ -> error + end. + +%% +%% Websockets internal functions for RFC6455 and hybi draft +%% +process_frames([], Acc) -> + lists:reverse(Acc); +process_frames([{Opcode, Payload} | Rest], Acc) -> + case Opcode of + 8 -> close; + _ -> + process_frames(Rest, [Payload | Acc]) + end. + +parse_hybi_frames(_, <<>>, Acc) -> + lists:reverse(Acc); +parse_hybi_frames(S, <<_Fin:1, + _Rsv:3, + Opcode:4, + _Mask:1, + PayloadLen:7, + MaskKey:4/binary, + Payload:PayloadLen/binary-unit:8, + Rest/binary>>, + Acc) when PayloadLen < 126 -> + Payload2 = hybi_unmask(Payload, MaskKey, <<>>), + parse_hybi_frames(S, Rest, [{Opcode, Payload2} | Acc]); +parse_hybi_frames(S, <<_Fin:1, + _Rsv:3, + Opcode:4, + _Mask:1, + 126:7, + PayloadLen:16, + MaskKey:4/binary, + Payload:PayloadLen/binary-unit:8, + Rest/binary>>, + Acc) -> + Payload2 = hybi_unmask(Payload, MaskKey, <<>>), + parse_hybi_frames(S, Rest, [{Opcode, Payload2} | Acc]); +parse_hybi_frames(Socket, <<_Fin:1, + _Rsv:3, + _Opcode:4, + _Mask:1, + 126:7, + _PayloadLen:16, + _MaskKey:4/binary, + _/binary-unit:8>> = PartFrame, + Acc) -> + ok = mochiweb_socket:exit_if_closed(mochiweb_socket:setopts(Socket, [{packet, 0}, {active, once}])), + receive + {tcp_closed, _} -> + mochiweb_socket:close(Socket), + exit(normal); + {ssl_closed, _} -> + mochiweb_socket:close(Socket), + exit(normal); + {tcp_error, _, _} -> + mochiweb_socket:close(Socket), + exit(normal); + {Proto, _, Continuation} when Proto =:= tcp orelse Proto =:= ssl -> + parse_hybi_frames(Socket, <>, + Acc); + _ -> + mochiweb_socket:close(Socket), + exit(normal) + after + 5000 -> + mochiweb_socket:close(Socket), + exit(normal) + end; +parse_hybi_frames(S, <<_Fin:1, + _Rsv:3, + Opcode:4, + _Mask:1, + 127:7, + 0:1, + 
PayloadLen:63, + MaskKey:4/binary, + Payload:PayloadLen/binary-unit:8, + Rest/binary>>, + Acc) -> + Payload2 = hybi_unmask(Payload, MaskKey, <<>>), + parse_hybi_frames(S, Rest, [{Opcode, Payload2} | Acc]). + +%% Unmasks RFC 6455 message +hybi_unmask(<>, MaskKey, Acc) -> + <> = MaskKey, + hybi_unmask(Rest, MaskKey, <>); +hybi_unmask(<>, MaskKey, Acc) -> + <> = MaskKey, + <>; +hybi_unmask(<>, MaskKey, Acc) -> + <> = MaskKey, + <>; +hybi_unmask(<>, MaskKey, Acc) -> + <> = MaskKey, + <>; +hybi_unmask(<<>>, _MaskKey, Acc) -> + Acc. + +payload_length(N) -> + case N of + N when N =< 125 -> << N >>; + N when N =< 16#ffff -> << 126, N:16 >>; + N when N =< 16#7fffffffffffffff -> << 127, N:64 >> + end. + + +%% +%% Websockets internal functions for hixie-76 websocket version +%% +parse_hixie_frames(<<>>, Frames) -> + lists:reverse(Frames); +parse_hixie_frames(<<0, T/binary>>, Frames) -> + {Frame, Rest} = parse_hixie(T, <<>>), + parse_hixie_frames(Rest, [Frame | Frames]). + +parse_hixie(<<255, Rest/binary>>, Buffer) -> + {Buffer, Rest}; +parse_hixie(<>, Buffer) -> + parse_hixie(T, <>). diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/reloader.erl b/rabbitmq-server/deps/mochiweb/src/reloader.erl similarity index 81% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/reloader.erl rename to rabbitmq-server/deps/mochiweb/src/reloader.erl index 8266b33..8130f45 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/reloader.erl +++ b/rabbitmq-server/deps/mochiweb/src/reloader.erl @@ -1,6 +1,24 @@ -%% @copyright 2007 Mochi Media, Inc. %% @author Matthew Dempsky +%% @copyright 2007 Mochi Media, Inc. 
+%% +%% Permission is hereby granted, free of charge, to any person obtaining a +%% copy of this software and associated documentation files (the "Software"), +%% to deal in the Software without restriction, including without limitation +%% the rights to use, copy, modify, merge, publish, distribute, sublicense, +%% and/or sell copies of the Software, and to permit persons to whom the +%% Software is furnished to do so, subject to the following conditions: %% +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. +%% +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +%% THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +%% DEALINGS IN THE SOFTWARE. + %% @doc Erlang module for automatically reloading modified modules %% during development. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp.template b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp.template similarity index 92% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp.template rename to rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp.template index 4942609..c56314c 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp.template +++ b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp.template @@ -13,6 +13,7 @@ {template, "mochiwebapp_skel/src/mochiapp_sup.erl", "{{dest}}/src/{{appid}}_sup.erl"}. {template, "mochiwebapp_skel/src/mochiapp_web.erl", "{{dest}}/src/{{appid}}_web.erl"}. 
{template, "mochiwebapp_skel/start-dev.sh", "{{dest}}/start-dev.sh"}. +{template, "mochiwebapp_skel/bench.sh", "{{dest}}/bench.sh"}. {template, "mochiwebapp_skel/priv/www/index.html", "{{dest}}/priv/www/index.html"}. {file, "../../.gitignore", "{{dest}}/.gitignore"}. {file, "../../Makefile", "{{dest}}/Makefile"}. @@ -20,3 +21,4 @@ {file, "../../rebar", "{{dest}}/rebar"}. {chmod, 8#755, "{{dest}}/rebar"}. {chmod, 8#755, "{{dest}}/start-dev.sh"}. +{chmod, 8#755, "{{dest}}/bench.sh"}. diff --git a/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/bench.sh b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/bench.sh new file mode 100755 index 0000000..eb6e9c9 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/bench.sh @@ -0,0 +1,19 @@ +#!/bin/sh + +# workaround for rebar mustache template bug +DEFAULT_PORT={{port}} +HOST=${HOST:-127.0.0.1} +PORT=${PORT:-${DEFAULT_PORT}} + +BENCH_RUN="siege -q -c400 -r100 -b http://$HOST:$PORT/hello_world" + +sleep 120 + +echo "" +echo "" +for i in `seq 1 10`; +do + echo "Running test #$i:" + $BENCH_RUN + sleep 90 +done diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/priv/www/index.html b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/priv/www/index.html similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/priv/www/index.html rename to rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/priv/www/index.html diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/rebar.config b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/rebar.config similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/rebar.config rename to 
rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/rebar.config diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp.app.src b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp.app.src similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp.app.src rename to rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp.app.src diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp.erl b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp.erl rename to rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp.erl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_app.erl b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_app.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_app.erl rename to rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_app.erl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_deps.erl b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_deps.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_deps.erl rename to rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_deps.erl diff --git 
a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_sup.erl b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_sup.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_sup.erl rename to rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_sup.erl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_web.erl b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_web.erl similarity index 89% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_web.erl rename to rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_web.erl index 8976265..8429a88 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/src/mochiapp_web.erl +++ b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/src/mochiapp_web.erl @@ -26,6 +26,9 @@ loop(Req, DocRoot) -> case Req:get(method) of Method when Method =:= 'GET'; Method =:= 'HEAD' -> case Path of + "hello_world" -> + Req:respond({200, [{"Content-Type", "text/plain"}], + "Hello world!\n"}); _ -> Req:serve_file(Path, DocRoot) end; @@ -44,9 +47,8 @@ loop(Req, DocRoot) -> {type, Type}, {what, What}, {trace, erlang:get_stacktrace()}], error_logger:error_report(Report), - %% NOTE: mustache templates need \\ because they are not awesome. Req:respond({500, [{"Content-Type", "text/plain"}], - "request failed, sorry\\n"}) + "request failed, sorry\n"}) end. 
%% Internal API diff --git a/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/start-dev.sh b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/start-dev.sh new file mode 100755 index 0000000..65c1692 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/support/templates/mochiwebapp_skel/start-dev.sh @@ -0,0 +1,7 @@ +#!/bin/sh +exec erl \ + -pa ebin deps/*/ebin \ + -boot start_sasl \ + -sname {{appid}}_dev \ + -s {{appid}} \ + -s reloader diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/test-materials/test_ssl_cert.pem b/rabbitmq-server/deps/mochiweb/support/test-materials/test_ssl_cert.pem similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/test-materials/test_ssl_cert.pem rename to rabbitmq-server/deps/mochiweb/support/test-materials/test_ssl_cert.pem diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/test-materials/test_ssl_key.pem b/rabbitmq-server/deps/mochiweb/support/test-materials/test_ssl_key.pem similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/test-materials/test_ssl_key.pem rename to rabbitmq-server/deps/mochiweb/support/test-materials/test_ssl_key.pem diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_base64url_tests.erl b/rabbitmq-server/deps/mochiweb/test/mochiweb_base64url_tests.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_base64url_tests.erl rename to rabbitmq-server/deps/mochiweb/test/mochiweb_base64url_tests.erl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_html_tests.erl b/rabbitmq-server/deps/mochiweb/test/mochiweb_html_tests.erl similarity index 99% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_html_tests.erl rename to rabbitmq-server/deps/mochiweb/test/mochiweb_html_tests.erl index 
3d35400..f67759a 100644 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_html_tests.erl +++ b/rabbitmq-server/deps/mochiweb/test/mochiweb_html_tests.erl @@ -126,6 +126,12 @@ tokens_test() -> mochiweb_html:tokens(<<"not html < at all">>)), ok. +surrogate_test() -> + %% https://github.com/mochi/mochiweb/issues/164 + ?assertEqual( + [{data,<<240,159,152,138>>,false}], + mochiweb_html:tokens(<<"��">>)). + parse_test() -> D0 = <<" diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_http_tests.erl b/rabbitmq-server/deps/mochiweb/test/mochiweb_http_tests.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_http_tests.erl rename to rabbitmq-server/deps/mochiweb/test/mochiweb_http_tests.erl diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request_tests.erl b/rabbitmq-server/deps/mochiweb/test/mochiweb_request_tests.erl similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_request_tests.erl rename to rabbitmq-server/deps/mochiweb/test/mochiweb_request_tests.erl diff --git a/rabbitmq-server/deps/mochiweb/test/mochiweb_socket_server_tests.erl b/rabbitmq-server/deps/mochiweb/test/mochiweb_socket_server_tests.erl new file mode 100644 index 0000000..c64f5b7 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/test/mochiweb_socket_server_tests.erl @@ -0,0 +1,149 @@ +-module(mochiweb_socket_server_tests). + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +socket_server(Opts, ServerFun) -> + ServerOpts = [{ip, "127.0.0.1"}, {port, 0}, {backlog, 5}, {loop, ServerFun}], + {ok, Server} = mochiweb_socket_server:start(ServerOpts ++ Opts), + Port = mochiweb_socket_server:get(Server, port), + {Server, Port}. 
+ +echo_loop(Socket) -> + ok = mochiweb_socket:setopts(Socket, [{active, once}]), + receive + {_Protocol, _, Data} -> + gen_tcp:send(Socket, Data), + echo_loop(Socket); + {tcp_closed, Socket} -> + ok + end. + +start_client_conns(Port, NumClients, ClientFun, ClientArgs, Tester) -> + Opts = [binary, {active, false}, {packet, 1}], + lists:foreach(fun (_N) -> + case gen_tcp:connect("127.0.0.1", Port, Opts) of + {ok, Socket} -> + spawn_link(fun() -> ClientFun(Socket, ClientArgs) end); + {error, E} -> + Tester ! {client_conn_error, E} + end + end, lists:seq(1, NumClients)). + +client_fun(_Socket, []) -> ok; +client_fun(Socket, [{close_sock} | Cmds]) -> + mochiweb_socket:close(Socket), + client_fun(Socket, Cmds); +client_fun(Socket, [{send_pid, To} | Cmds]) -> + To ! {client, self()}, + client_fun(Socket, Cmds); +client_fun(Socket, [{send, Data, Tester} | Cmds]) -> + case gen_tcp:send(Socket, Data) of + ok -> ok; + {error, E} -> Tester ! {client_send_error, self(), E} + end, + client_fun(Socket, Cmds); +client_fun(Socket, [{recv, Length, Timeout, Tester} | Cmds]) -> + case gen_tcp:recv(Socket, Length, Timeout) of + {ok, _} -> ok; + {error, E} -> Tester ! {client_recv_error, self(), E} + end, + client_fun(Socket, Cmds); +client_fun(Socket, [{wait_msg, Msg} | Cmds]) -> + receive + M when M =:= Msg -> ok + end, + client_fun(Socket, Cmds); +client_fun(Socket, [{send_msg, Msg, To} | Cmds]) -> + To ! {Msg, self()}, + client_fun(Socket, Cmds). + +test_basic_accept(Max, PoolSize, NumClients, ReportTo) -> + Tester = self(), + + ServerOpts = [{max, Max}, {acceptor_pool_size, PoolSize}], + ServerLoop = + fun (Socket, _Opts) -> + Tester ! 
{server_accepted, self()}, + mochiweb_socket:setopts(Socket, [{packet, 1}]), + echo_loop(Socket) + end, + {Server, Port} = socket_server(ServerOpts, ServerLoop), + + Data = <<"data">>, + Timeout = 2000, + ClientCmds = [{send_pid, Tester}, {wait_msg, go}, + {send, Data, Tester}, {recv, size(Data), Timeout, Tester}, + {close_sock}, {send_msg, done, Tester}], + start_client_conns(Port, NumClients, fun client_fun/2, ClientCmds, Tester), + + EventCount = min(NumClients, max(Max, PoolSize)), + + ConnectLoop = + fun (Loop, Connected, Accepted, Errors) -> + case (length(Accepted) + Errors >= EventCount + andalso length(Connected) + Errors >= NumClients) of + true -> {Connected, Accepted}; + false -> + receive + {server_accepted, ServerPid} -> + Loop(Loop, Connected, [ServerPid | Accepted], Errors); + {client, ClientPid} -> + Loop(Loop, [ClientPid | Connected], Accepted, Errors); + {client_conn_error, _E} -> + Loop(Loop, Connected, Accepted, Errors + 1) + end + end + end, + {Connected, Accepted} = ConnectLoop(ConnectLoop, [], [], 0), + + ActiveAfterConnect = mochiweb_socket_server:get(Server, active_sockets), + WaitingAfterConnect = mochiweb_socket_server:get(Server, waiting_acceptors), + + lists:foreach(fun(Pid) -> Pid ! go end, Connected), + WaitLoop = + fun (Loop, Done) -> + case (length(Done) >= length(Connected)) of + true -> + ok; + false -> + receive + {done, From} -> + Loop(Loop, [From | Done]) + end + end + end, + ok = WaitLoop(WaitLoop, []), + + mochiweb_socket_server:stop(Server), + + ReportTo ! {result, {length(Accepted), + ActiveAfterConnect, WaitingAfterConnect}}. 
+ +normal_acceptor_test_fun() -> + % {Max, PoolSize, NumClients, + % {ExpectedAccepts, + % ExpectedActiveAfterConnect, ExpectedWaitingAfterConnect} + Tests = [{3, 1, 1, {1, 1, 1}}, + {3, 1, 2, {2, 2, 1}}, + {3, 1, 3, {3, 3, 0}}, + {3, 3, 3, {3, 3, 0}}, + {1, 3, 3, {3, 3, 0}}, % Max is overridden to PoolSize + {3, 2, 6, {3, 3, 0}} + ], + [fun () -> + Self = self(), + spawn(fun () -> + test_basic_accept(Max, PoolSize, NumClients, Self) + end), + Result = receive {result, R} -> R end, + ?assertEqual(Expected, Result) + end || {Max, PoolSize, NumClients, Expected} <- Tests]. + +-define(LARGE_TIMEOUT, 40). + +normal_acceptor_test_() -> + Tests = normal_acceptor_test_fun(), + {timeout, ?LARGE_TIMEOUT, Tests}. + +-endif. diff --git a/rabbitmq-server/deps/mochiweb/test/mochiweb_test_util.erl b/rabbitmq-server/deps/mochiweb/test/mochiweb_test_util.erl new file mode 100644 index 0000000..a0bf11a --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/test/mochiweb_test_util.erl @@ -0,0 +1,134 @@ +-module(mochiweb_test_util). +-export([with_server/3, client_request/4, sock_fun/2, + read_server_headers/1, drain_reply/3]). +-include("mochiweb_test_util.hrl"). +-include_lib("eunit/include/eunit.hrl"). + +ssl_cert_opts() -> + EbinDir = filename:dirname(code:which(?MODULE)), + CertDir = filename:join([EbinDir, "..", "support", "test-materials"]), + CertFile = filename:join(CertDir, "test_ssl_cert.pem"), + KeyFile = filename:join(CertDir, "test_ssl_key.pem"), + [{certfile, CertFile}, {keyfile, KeyFile}]. + +with_server(Transport, ServerFun, ClientFun) -> + ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}], + ServerOpts = case Transport of + plain -> + ServerOpts0; + ssl -> + ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}] + end, + {ok, Server} = mochiweb_http:start_link(ServerOpts), + Port = mochiweb_socket_server:get(Server, port), + Res = (catch ClientFun(Transport, Port)), + mochiweb_http:stop(Server), + Res. 
+ +sock_fun(Transport, Port) -> + Opts = [binary, {active, false}, {packet, http}], + case Transport of + plain -> + {ok, Socket} = gen_tcp:connect("127.0.0.1", Port, Opts), + fun (recv) -> + gen_tcp:recv(Socket, 0); + ({recv, Length}) -> + gen_tcp:recv(Socket, Length); + ({send, Data}) -> + gen_tcp:send(Socket, Data); + ({setopts, L}) -> + inet:setopts(Socket, L); + (get) -> + Socket + end; + ssl -> + {ok, Socket} = ssl:connect("127.0.0.1", Port, [{ssl_imp, new} | Opts]), + fun (recv) -> + ssl:recv(Socket, 0); + ({recv, Length}) -> + ssl:recv(Socket, Length); + ({send, Data}) -> + ssl:send(Socket, Data); + ({setopts, L}) -> + ssl:setopts(Socket, L); + (get) -> + {ssl, Socket} + end + end. + +client_request(Transport, Port, Method, TestReqs) -> + client_request(sock_fun(Transport, Port), Method, TestReqs). + +client_request(SockFun, _Method, []) -> + {the_end, {error, closed}} = {the_end, SockFun(recv)}, + ok; +client_request(SockFun, Method, + [#treq{path=Path, body=Body, xreply=ExReply, xheaders=ExHeaders} | Rest]) -> + Request = [atom_to_list(Method), " ", Path, " HTTP/1.1\r\n", + client_headers(Body, Rest =:= []), + "\r\n", + Body], + ok = SockFun({setopts, [{packet, http}]}), + ok = SockFun({send, Request}), + case Method of + 'GET' -> + {ok, {http_response, {1,1}, 200, "OK"}} = SockFun(recv); + 'POST' -> + {ok, {http_response, {1,1}, 201, "Created"}} = SockFun(recv); + 'CONNECT' -> + {ok, {http_response, {1,1}, 200, "OK"}} = SockFun(recv) + end, + Headers = read_server_headers(SockFun), + ?assertMatch("MochiWeb" ++ _, mochiweb_headers:get_value("Server", Headers)), + ?assert(mochiweb_headers:get_value("Date", Headers) =/= undefined), + ?assert(mochiweb_headers:get_value("Content-Type", Headers) =/= undefined), + ContentLength = list_to_integer(mochiweb_headers:get_value("Content-Length", Headers)), + EHeaders = mochiweb_headers:make(ExHeaders), + lists:foreach( + fun (K) -> + ?assertEqual(mochiweb_headers:get_value(K, EHeaders), + 
mochiweb_headers:get_value(K, Headers)) + end, + %% Assumes implementation details of the headers + gb_trees:keys(EHeaders)), + {payload, ExReply} = {payload, drain_reply(SockFun, ContentLength, <<>>)}, + client_request(SockFun, Method, Rest). + +read_server_headers(SockFun) -> + ok = SockFun({setopts, [{packet, httph}]}), + Headers = read_server_headers(SockFun, mochiweb_headers:empty()), + ok = SockFun({setopts, [{packet, raw}]}), + Headers. + +read_server_headers(SockFun, Headers) -> + case SockFun(recv) of + {ok, http_eoh} -> + Headers; + {ok, {http_header, _, Header, _, Value}} -> + read_server_headers( + SockFun, + mochiweb_headers:insert(Header, Value, Headers)) + end. + +client_headers(Body, IsLastRequest) -> + ["Host: localhost\r\n", + case Body of + <<>> -> + ""; + _ -> + ["Content-Type: application/octet-stream\r\n", + "Content-Length: ", integer_to_list(byte_size(Body)), "\r\n"] + end, + case IsLastRequest of + true -> + "Connection: close\r\n"; + false -> + "" + end]. + +drain_reply(_SockFun, 0, Acc) -> + Acc; +drain_reply(SockFun, Length, Acc) -> + Sz = erlang:min(Length, 1024), + {ok, B} = SockFun({recv, Sz}), + drain_reply(SockFun, Length - Sz, <<Acc/binary, B/binary>>). diff --git a/rabbitmq-server/deps/mochiweb/test/mochiweb_test_util.hrl b/rabbitmq-server/deps/mochiweb/test/mochiweb_test_util.hrl new file mode 100644 index 0000000..503be98 --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/test/mochiweb_test_util.hrl @@ -0,0 +1 @@ +-record(treq, {path, body= <<>>, xreply= <<>>, xheaders= []}). diff --git a/rabbitmq-server/deps/mochiweb/test/mochiweb_tests.erl b/rabbitmq-server/deps/mochiweb/test/mochiweb_tests.erl new file mode 100644 index 0000000..0b558ac --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/test/mochiweb_tests.erl @@ -0,0 +1,220 @@ +-module(mochiweb_tests). +-include_lib("eunit/include/eunit.hrl"). +-include("mochiweb_test_util.hrl"). + +with_server(Transport, ServerFun, ClientFun) -> + mochiweb_test_util:with_server(Transport, ServerFun, ClientFun). 
+ +request_test() -> + R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []), + "/foo/bar/baz wibble quux" = R:get(path), + ok. + +-define(LARGE_TIMEOUT, 60). + +single_http_GET_test() -> + do_GET(plain, 1). + +single_https_GET_test() -> + do_GET(ssl, 1). + +multiple_http_GET_test() -> + do_GET(plain, 3). + +multiple_https_GET_test() -> + do_GET(ssl, 3). + +hundred_http_GET_test_() -> % note the underscore + {timeout, ?LARGE_TIMEOUT, + fun() -> ?assertEqual(ok, do_GET(plain,100)) end}. + +hundred_https_GET_test_() -> % note the underscore + {timeout, ?LARGE_TIMEOUT, + fun() -> ?assertEqual(ok, do_GET(ssl,100)) end}. + +single_128_http_POST_test() -> + do_POST(plain, 128, 1). + +single_128_https_POST_test() -> + do_POST(ssl, 128, 1). + +single_2k_http_POST_test() -> + do_POST(plain, 2048, 1). + +single_2k_https_POST_test() -> + do_POST(ssl, 2048, 1). + +single_100k_http_POST_test_() -> % note the underscore + {timeout, ?LARGE_TIMEOUT, + fun() -> ?assertEqual(ok, do_POST(plain, 102400, 1)) end}. + +single_100k_https_POST_test_() -> % note the underscore + {timeout, ?LARGE_TIMEOUT, + fun() -> ?assertEqual(ok, do_POST(ssl, 102400, 1)) end}. + +multiple_100k_http_POST_test() -> + {timeout, ?LARGE_TIMEOUT, + fun() -> ?assertEqual(ok, do_POST(plain, 102400, 3)) end}. + +multiple_100K_https_POST_test() -> + {timeout, ?LARGE_TIMEOUT, + fun() -> ?assertEqual(ok, do_POST(ssl, 102400, 3)) end}. + +hundred_128_http_POST_test_() -> % note the underscore + {timeout, ?LARGE_TIMEOUT, + fun() -> ?assertEqual(ok, do_POST(plain, 128, 100)) end}. + +hundred_128_https_POST_test_() -> % note the underscore + {timeout, ?LARGE_TIMEOUT, + fun() -> ?assertEqual(ok, do_POST(ssl, 128, 100)) end}. + +single_GET_scheme_test_() -> + [{"ssl", ?_assertEqual(ok, do_GET("derp", ssl, 1))}, + {"plain", ?_assertEqual(ok, do_GET("derp", plain, 1))}]. 
+ +single_GET_absoluteURI_test_() -> + Uri = "https://example.com:123/x/", + ServerFun = fun (Req) -> + Req:ok({"text/plain", Req:get(path)}) + end, + %% Note that all the scheme/host/port information is discarded from path + ClientFun = new_client_fun('GET', [#treq{path = Uri, xreply = <<"/x/">>}]), + [{atom_to_list(Transport), + ?_assertEqual(ok, with_server(Transport, ServerFun, ClientFun))} + || Transport <- [ssl, plain]]. + +single_CONNECT_test_() -> + [{"ssl", ?_assertEqual(ok, do_CONNECT(ssl, 1))}, + {"plain", ?_assertEqual(ok, do_CONNECT(plain, 1))}]. + +single_GET_any_test_() -> + ServerFun = fun (Req) -> + Req:ok({"text/plain", Req:get(path)}) + end, + ClientFun = new_client_fun('GET', [#treq{path = "*", xreply = <<"*">>}]), + [{atom_to_list(Transport), + ?_assertEqual(ok, with_server(Transport, ServerFun, ClientFun))} + || Transport <- [ssl, plain]]. + + +cookie_header_test() -> + ReplyPrefix = "You requested: ", + ExHeaders = [{"Set-Cookie", "foo=bar"}, + {"Set-Cookie", "foo=baz"}], + ServerFun = fun (Req) -> + Reply = ReplyPrefix ++ Req:get(path), + Req:ok({"text/plain", ExHeaders, Reply}) + end, + Path = "cookie_header", + ExpectedReply = list_to_binary(ReplyPrefix ++ Path), + TestReqs = [#treq{path=Path, xreply=ExpectedReply, xheaders=ExHeaders}], + ClientFun = new_client_fun('GET', TestReqs), + ok = with_server(plain, ServerFun, ClientFun), + ok. + + +do_CONNECT(Transport, Times) -> + PathPrefix = "example.com:", + ReplyPrefix = "You requested: ", + ServerFun = fun (Req) -> + Reply = ReplyPrefix ++ Req:get(path), + Req:ok({"text/plain", Reply}) + end, + TestReqs = [begin + Path = PathPrefix ++ integer_to_list(N), + ExpectedReply = list_to_binary(ReplyPrefix ++ Path), + #treq{path=Path, xreply=ExpectedReply} + end || N <- lists:seq(1, Times)], + ClientFun = new_client_fun('CONNECT', TestReqs), + ok = with_server(Transport, ServerFun, ClientFun), + ok. + +do_GET(Transport, Times) -> + do_GET("/whatever/", Transport, Times). 
+ +do_GET(PathPrefix, Transport, Times) -> + ReplyPrefix = "You requested: ", + ServerFun = fun (Req) -> + Reply = ReplyPrefix ++ Req:get(path), + Req:ok({"text/plain", Reply}) + end, + TestReqs = [begin + Path = PathPrefix ++ integer_to_list(N), + ExpectedReply = list_to_binary(ReplyPrefix ++ Path), + #treq{path=Path, xreply=ExpectedReply} + end || N <- lists:seq(1, Times)], + ClientFun = new_client_fun('GET', TestReqs), + ok = with_server(Transport, ServerFun, ClientFun), + ok. + +do_POST(Transport, Size, Times) -> + ServerFun = fun (Req) -> + Body = Req:recv_body(), + Headers = [{"Content-Type", "application/octet-stream"}], + Req:respond({201, Headers, Body}) + end, + TestReqs = [begin + Path = "/stuff/" ++ integer_to_list(N), + Body = crypto:rand_bytes(Size), + #treq{path=Path, body=Body, xreply=Body} + end || N <- lists:seq(1, Times)], + ClientFun = new_client_fun('POST', TestReqs), + ok = with_server(Transport, ServerFun, ClientFun), + ok. + +new_client_fun(Method, TestReqs) -> + fun (Transport, Port) -> + mochiweb_test_util:client_request(Transport, Port, Method, TestReqs) + end. + +close_on_unread_data_test() -> + ok = with_server( + plain, + fun mochiweb_request:not_found/1, + fun close_on_unread_data_client/2). 
+ +close_on_unread_data_client(Transport, Port) -> + SockFun = mochiweb_test_util:sock_fun(Transport, Port), + %% A normal GET request should not trigger this behavior + Request0 = string:join( + ["GET / HTTP/1.1", + "Host: localhost", + "", + ""], + "\r\n"), + ok = SockFun({setopts, [{packet, http}]}), + ok = SockFun({send, Request0}), + ?assertMatch( + {ok, {http_response, {1, 1}, 404, _}}, + SockFun(recv)), + Headers0 = mochiweb_test_util:read_server_headers(SockFun), + ?assertEqual( + undefined, + mochiweb_headers:get_value("Connection", Headers0)), + Len0 = list_to_integer( + mochiweb_headers:get_value("Content-Length", Headers0)), + _Body0 = mochiweb_test_util:drain_reply(SockFun, Len0, <<>>), + %% Re-use same socket + Request = string:join( + ["POST / HTTP/1.1", + "Host: localhost", + "Content-Type: application/json", + "Content-Length: 2", + "", + "{}"], + "\r\n"), + ok = SockFun({setopts, [{packet, http}]}), + ok = SockFun({send, Request}), + ?assertMatch( + {ok, {http_response, {1, 1}, 404, _}}, + SockFun(recv)), + Headers = mochiweb_test_util:read_server_headers(SockFun), + %% Expect to see a Connection: close header when we know the + %% server will close the connection re #146 + ?assertEqual( + "close", + mochiweb_headers:get_value("Connection", Headers)), + Len = list_to_integer(mochiweb_headers:get_value("Content-Length", Headers)), + _Body = mochiweb_test_util:drain_reply(SockFun, Len, <<>>), + ?assertEqual({error, closed}, SockFun(recv)), + ok. diff --git a/rabbitmq-server/deps/mochiweb/test/mochiweb_websocket_tests.erl b/rabbitmq-server/deps/mochiweb/test/mochiweb_websocket_tests.erl new file mode 100644 index 0000000..eb8de5b --- /dev/null +++ b/rabbitmq-server/deps/mochiweb/test/mochiweb_websocket_tests.erl @@ -0,0 +1,160 @@ +-module(mochiweb_websocket_tests). +-author('lukasz.lalik@zadane.pl'). + +%% The MIT License (MIT) + +%% Copyright (c) 2012 Zadane.pl sp. z o.o. 
+ +%% Permission is hereby granted, free of charge, to any person obtaining a copy +%% of this software and associated documentation files (the "Software"), to deal +%% in the Software without restriction, including without limitation the rights +%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +%% copies of the Software, and to permit persons to whom the Software is +%% furnished to do so, subject to the following conditions: + +%% The above copyright notice and this permission notice shall be included in +%% all copies or substantial portions of the Software. + +%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +%% THE SOFTWARE. + +-include_lib("eunit/include/eunit.hrl"). 
+ +make_handshake_for_correct_client_test() -> + %% Hybi handshake + Req1 = mochiweb_request:new( + nil, 'GET', "/foo", {1, 1}, + mochiweb_headers:make([{"Sec-WebSocket-Key", + "Xn3fdKyc3qEXPuj2A3O+ZA=="}])), + + {Version1, + {HttpCode1, Headers1, _}} = mochiweb_websocket:make_handshake(Req1), + ?assertEqual(hybi, Version1), + ?assertEqual(101, HttpCode1), + ?assertEqual("Upgrade", proplists:get_value("Connection", Headers1)), + ?assertEqual(<<"BIFTHkJk4r5t8kuud82tZJaQsCE=">>, + proplists:get_value("Sec-Websocket-Accept", Headers1)), + + %% Hixie handshake + {Version2, {HttpCode2, Headers2, Body2}} = + mochiweb_websocket:hixie_handshake( + "ws://", + "localhost", "/", + "33j284 9 z63 e 9 7", + "TF'3|6D12659H 7 70", + <<175,181,191,215,128,195,144,120>>, + "null"), + ?assertEqual(hixie, Version2), + ?assertEqual(101, HttpCode2), + ?assertEqual("null", proplists:get_value("Sec-WebSocket-Origin", Headers2)), + ?assertEqual("ws://localhost/", + proplists:get_value("Sec-WebSocket-Location", Headers2)), + ?assertEqual( + <<230,144,237,94,84,214,41,69,244,150,134,167,221,103,239,246>>, + Body2). + +hybi_frames_decode_test() -> + ?assertEqual( + [{1, <<"foo">>}], + mochiweb_websocket:parse_hybi_frames( + nil, <<129,131,118,21,153,58,16,122,246>>, [])), + ?assertEqual( + [{1, <<"foo">>}, {1, <<"bar">>}], + mochiweb_websocket:parse_hybi_frames( + nil, + <<129,131,1,225,201,42,103,142,166,129,131,93,222,214,66,63,191,164>>, + [])). + +hixie_frames_decode_test() -> + ?assertEqual( + [], + mochiweb_websocket:parse_hixie_frames(<<>>, [])), + ?assertEqual( + [<<"foo">>], + mochiweb_websocket:parse_hixie_frames(<<0,102,111,111,255>>, [])), + ?assertEqual( + [<<"foo">>, <<"bar">>], + mochiweb_websocket:parse_hixie_frames( + <<0,102,111,111,255,0,98,97,114,255>>, + [])). 
+ +end_to_end_test_factory(ServerTransport) -> + mochiweb_test_util:with_server( + ServerTransport, + fun end_to_end_server/1, + fun (Transport, Port) -> + end_to_end_client(mochiweb_test_util:sock_fun(Transport, Port)) + end). + +end_to_end_server(Req) -> + ?assertEqual("Upgrade", Req:get_header_value("connection")), + ?assertEqual("websocket", Req:get_header_value("upgrade")), + {ReentryWs, _ReplyChannel} = mochiweb_websocket:upgrade_connection( + Req, + fun end_to_end_ws_loop/3), + ReentryWs(ok). + +end_to_end_ws_loop(Payload, State, ReplyChannel) -> + %% Echo server + lists:foreach(ReplyChannel, Payload), + State. + +end_to_end_client(S) -> + %% Key and Accept per https://tools.ietf.org/html/rfc6455 + UpgradeReq = string:join( + ["GET / HTTP/1.1", + "Host: localhost", + "Upgrade: websocket", + "Connection: Upgrade", + "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==", + "", + ""], "\r\n"), + ok = S({send, UpgradeReq}), + {ok, {http_response, {1,1}, 101, _}} = S(recv), + read_expected_headers( + S, + [{'Upgrade', "websocket"}, + {'Connection', "Upgrade"}, + {'Content-Length', "0"}, + {"Sec-Websocket-Accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="}]), + %% The first message sent over telegraph :) + SmallMessage = <<"What hath God wrought?">>, + ok = S({send, + << 1:1, %% Fin + 0:1, %% Rsv1 + 0:1, %% Rsv2 + 0:1, %% Rsv3 + 2:4, %% Opcode, 1 = text frame + 1:1, %% Mask on + (byte_size(SmallMessage)):7, %% Length, <125 case + 0:32, %% Mask (trivial) + SmallMessage/binary >>}), + {ok, WsFrames} = S(recv), + << 1:1, %% Fin + 0:1, %% Rsv1 + 0:1, %% Rsv2 + 0:1, %% Rsv3 + 1:4, %% Opcode, text frame (all mochiweb suports for now) + MsgSize:8, %% Expecting small size + SmallMessage/binary >> = WsFrames, + ?assertEqual(MsgSize, byte_size(SmallMessage)), + ok. + +read_expected_headers(S, D) -> + Headers = mochiweb_test_util:read_server_headers(S), + lists:foreach( + fun ({K, V}) -> + ?assertEqual(V, mochiweb_headers:get_value(K, Headers)) + end, + D). 
+ +end_to_end_http_test() -> + end_to_end_test_factory(plain). + +end_to_end_https_test() -> + end_to_end_test_factory(ssl). diff --git a/rabbitmq-server/deps/rabbit_common/LICENSE b/rabbitmq-server/deps/rabbit_common/LICENSE new file mode 100644 index 0000000..b086024 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/LICENSE @@ -0,0 +1,11 @@ +This package, the RabbitMQ Management Plugin is licensed under the MPL. For the +MPL, please see LICENSE-MPL-RabbitMQ. + +This package makes use of the following third party libraries: +jQuery - http://jquery.com/ - MIT license, see LICENSE-MIT-jQuery164 +EJS - http://embeddedjs.com/ - MIT license, see LICENSE-MIT-EJS10 +Sammy - http://code.quirkey.com/sammy/ - MIT license, see LICENSE-MIT-Sammy060 +Cowboy - http://ninenines.eu/ - ISC license +base64.js - http://code.google.com/p/stringencoders/ - BSD license, see LICENSE-BSD-base64js +If you have any questions regarding licensing, please contact us at +info@rabbitmq.com. diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MPL-RabbitMQ b/rabbitmq-server/deps/rabbit_common/LICENSE-MPL-RabbitMQ similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MPL-RabbitMQ rename to rabbitmq-server/deps/rabbit_common/LICENSE-MPL-RabbitMQ index 0339c53..e163fcc 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MPL-RabbitMQ +++ b/rabbitmq-server/deps/rabbit_common/LICENSE-MPL-RabbitMQ @@ -447,7 +447,7 @@ EXHIBIT A -Mozilla Public License. The Original Code is RabbitMQ Management Plugin. The Initial Developer of the Original Code is GoPivotal, Inc. - Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.'' + Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved.'' [NOTE: The text of this Exhibit A may differ slightly from the text of the notices in the Source Code files of the Original Code. 
You should diff --git a/rabbitmq-server/deps/rabbit_common/Makefile b/rabbitmq-server/deps/rabbit_common/Makefile new file mode 100644 index 0000000..44b9566 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/Makefile @@ -0,0 +1,71 @@ +PROJECT = rabbit_common + +BUILD_DEPS = rabbitmq_codegen +TEST_DEPS = mochiweb + +.DEFAULT_GOAL = all + +EXTRA_SOURCES += include/rabbit_framing.hrl \ + src/rabbit_framing_amqp_0_8.erl \ + src/rabbit_framing_amqp_0_9_1.erl + +.DEFAULT_GOAL = all +$(PROJECT).d:: $(EXTRA_SOURCES) + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include mk/rabbitmq-components.mk +include erlang.mk +include mk/rabbitmq-dist.mk + +# -------------------------------------------------------------------- +# Compilation. +# -------------------------------------------------------------------- + +ERTS_VER := $(shell erl -version 2>&1 | sed -E 's/.* version //') +tls_atom_version_MAX_ERTS_VER = 6.0 +ifeq ($(call compare_version,$(ERTS_VER),$(tls_atom_version_MAX_ERTS_VER),<),true) +RMQ_ERLC_OPTS += -Ddefine_tls_atom_version +endif + +ERLC_OPTS += $(RMQ_ERLC_OPTS) + +TEST_ERLC_OPTS += $(RMQ_ERLC_OPTS) + +# -------------------------------------------------------------------- +# Framing sources generation. 
+# -------------------------------------------------------------------- + +PYTHON ?= python +CODEGEN = $(CURDIR)/codegen.py +CODEGEN_DIR ?= $(DEPS_DIR)/rabbitmq_codegen +CODEGEN_AMQP = $(CODEGEN_DIR)/amqp_codegen.py + +AMQP_SPEC_JSON_FILES_0_8 = $(CODEGEN_DIR)/amqp-rabbitmq-0.8.json +AMQP_SPEC_JSON_FILES_0_9_1 = $(CODEGEN_DIR)/amqp-rabbitmq-0.9.1.json \ + $(CODEGEN_DIR)/credit_extension.json + +include/rabbit_framing.hrl:: $(CODEGEN) $(CODEGEN_AMQP) \ + $(AMQP_SPEC_JSON_FILES_0_9_1) $(AMQP_SPEC_JSON_FILES_0_8) + $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \ + $(PYTHON) $(CODEGEN) --ignore-conflicts header \ + $(AMQP_SPEC_JSON_FILES_0_9_1) $(AMQP_SPEC_JSON_FILES_0_8) $@ + +src/rabbit_framing_amqp_0_9_1.erl:: $(CODEGEN) $(CODEGEN_AMQP) \ + $(AMQP_SPEC_JSON_FILES_0_9_1) + $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \ + $(PYTHON) $(CODEGEN) body $(AMQP_SPEC_JSON_FILES_0_9_1) $@ + +src/rabbit_framing_amqp_0_8.erl:: $(CODEGEN) $(CODEGEN_AMQP) \ + $(AMQP_SPEC_JSON_FILES_0_8) + $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \ + $(PYTHON) $(CODEGEN) body $(AMQP_SPEC_JSON_FILES_0_8) $@ + +clean:: clean-extra-sources + +clean-extra-sources: + $(gen_verbose) rm -f $(EXTRA_SOURCES) diff --git a/rabbitmq-server/deps/rabbit_common/build.config b/rabbitmq-server/deps/rabbit_common/build.config new file mode 100644 index 0000000..b143068 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/build.config @@ -0,0 +1,43 @@ +# Do *not* comment or remove core modules +# unless you know what you are doing. +# +# Feel free to comment plugins out however. + +# Core modules. +core/core +index/* +core/index +core/deps + +# Plugins that must run before Erlang code gets compiled. +plugins/erlydtl +plugins/protobuffs + +# Core modules, continued. +core/erlc +core/docs +core/rel +core/test +core/compat + +# Plugins. 
+plugins/asciidoc +plugins/bootstrap +plugins/c_src +plugins/ci +plugins/ct +plugins/dialyzer +# plugins/edoc +plugins/elvis +plugins/escript +plugins/eunit +plugins/relx +plugins/shell +plugins/triq +plugins/xref + +# Plugins enhancing the functionality of other plugins. +plugins/cover + +# Core modules which can use variables from plugins. +core/deps-tools diff --git a/rabbitmq-server/codegen.py b/rabbitmq-server/deps/rabbit_common/codegen.py old mode 100644 new mode 100755 similarity index 72% rename from rabbitmq-server/codegen.py rename to rabbitmq-server/deps/rabbit_common/codegen.py index fbc6f61..e2468c2 --- a/rabbitmq-server/codegen.py +++ b/rabbitmq-server/deps/rabbit_common/codegen.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + ## The contents of this file are subject to the Mozilla Public License ## Version 1.1 (the "License"); you may not use this file except in ## compliance with the License. You may obtain a copy of the License @@ -15,10 +17,9 @@ ## from __future__ import nested_scopes +from __future__ import print_function import sys -sys.path.append("../rabbitmq-codegen") # in case we're next to an experimental revision -sys.path.append("codegen") # in case we're building from a distribution package from amqp_codegen import * import string @@ -35,13 +36,19 @@ def convertTable(d): erlangDefaultValueTypeConvMap = { bool : lambda x: str(x).lower(), - str : lambda x: "<<\"" + x + "\">>", int : lambda x: str(x), float : lambda x: str(x), - dict: convertTable, - unicode: lambda x: "<<\"" + x.encode("utf-8") + "\">>" + dict: convertTable } +try: + _checkIfPython2 = unicode + erlangDefaultValueTypeConvMap[str] = lambda x: "<<\"" + x + "\">>" + erlangDefaultValueTypeConvMap[unicode] = lambda x: "<<\"" + x.encode("utf-8") + "\">>" +except NameError: + erlangDefaultValueTypeConvMap[bytes] = lambda x: "<<\"" + x + "\">>" + erlangDefaultValueTypeConvMap[str] = lambda x: "<<\"" + x + "\">>" + def erlangize(s): s = s.replace('-', '_') s = s.replace(' ', '_') @@ 
-91,7 +98,7 @@ def prettyType(typeName, subTypes, typesPerLine = 4): return "-type(%s ::\n %s)." % (typeName, sTs) def printFileHeader(): - print """%% Autogenerated code. Do not edit. + print("""%% Autogenerated code. Do not edit. %% %% The contents of this file are subject to the Mozilla Public License %% Version 1.1 (the "License"); you may not use this file except in @@ -107,7 +114,7 @@ def printFileHeader(): %% %% The Initial Developer of the Original Code is Pivotal Software, Inc. %% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. -%%""" +%%""") def genErl(spec): def erlType(domain): @@ -126,30 +133,30 @@ def genErl(spec): return ', '.join([erlangize(f.name) + " = F" + str(f.index) for f in fields]) def genLookupMethodName(m): - print "lookup_method_name({%d, %d}) -> %s;" % (m.klass.index, m.index, m.erlangName()) + print("lookup_method_name({%d, %d}) -> %s;" % (m.klass.index, m.index, m.erlangName())) def genLookupClassName(c): - print "lookup_class_name(%d) -> %s;" % (c.index, c.erlangName()) + print("lookup_class_name(%d) -> %s;" % (c.index, c.erlangName())) def genMethodId(m): - print "method_id(%s) -> {%d, %d};" % (m.erlangName(), m.klass.index, m.index) + print("method_id(%s) -> {%d, %d};" % (m.erlangName(), m.klass.index, m.index)) def genMethodHasContent(m): - print "method_has_content(%s) -> %s;" % (m.erlangName(), str(m.hasContent).lower()) + print("method_has_content(%s) -> %s;" % (m.erlangName(), str(m.hasContent).lower())) def genMethodIsSynchronous(m): hasNoWait = "nowait" in fieldNameList(m.arguments) if m.isSynchronous and hasNoWait: - print "is_method_synchronous(#%s{nowait = NoWait}) -> not(NoWait);" % (m.erlangName()) + print("is_method_synchronous(#%s{nowait = NoWait}) -> not(NoWait);" % (m.erlangName())) else: - print "is_method_synchronous(#%s{}) -> %s;" % (m.erlangName(), str(m.isSynchronous).lower()) + print("is_method_synchronous(#%s{}) -> %s;" % (m.erlangName(), str(m.isSynchronous).lower())) def 
genMethodFieldTypes(m): """Not currently used - may be useful in future?""" - print "method_fieldtypes(%s) -> %s;" % (m.erlangName(), fieldTypeList(m.arguments)) + print("method_fieldtypes(%s) -> %s;" % (m.erlangName(), fieldTypeList(m.arguments))) def genMethodFieldNames(m): - print "method_fieldnames(%s) -> %s;" % (m.erlangName(), fieldNameList(m.arguments)) + print("method_fieldnames(%s) -> %s;" % (m.erlangName(), fieldNameList(m.arguments))) def packMethodFields(fields): packed = [] @@ -192,22 +199,22 @@ def genErl(spec): type = erlType(f.domain) if type == 'bit': for index in range(f.count()): - print " F%d = ((F%dBits band %d) /= 0)," % \ + print(" F%d = ((F%dBits band %d) /= 0)," % \ (f.index + index, f.index, - 1 << index) + 1 << index)) elif type == 'table': - print " F%d = rabbit_binary_parser:parse_table(F%dTab)," % \ - (f.index, f.index) + print(" F%d = rabbit_binary_parser:parse_table(F%dTab)," % \ + (f.index, f.index)) # We skip the check on content-bearing methods for # speed. This is a sanity check, not a security thing. 
elif type == 'shortstr' and not hasContent: - print " rabbit_binary_parser:assert_utf8(F%d)," % (f.index) + print(" rabbit_binary_parser:assert_utf8(F%d)," % (f.index)) else: pass def genMethodRecord(m): - print "method_record(%s) -> #%s{};" % (m.erlangName(), m.erlangName()) + print("method_record(%s) -> #%s{};" % (m.erlangName(), m.erlangName())) def genDecodeMethodFields(m): packedFields = packMethodFields(m.arguments) @@ -217,9 +224,9 @@ def genErl(spec): else: restSeparator = '' recordConstructorExpr = '#%s{%s}' % (m.erlangName(), fieldMapList(m.arguments)) - print "decode_method_fields(%s, <<%s>>) ->" % (m.erlangName(), binaryPattern) + print("decode_method_fields(%s, <<%s>>) ->" % (m.erlangName(), binaryPattern)) genFieldPostprocessing(packedFields, m.hasContent) - print " %s;" % (recordConstructorExpr,) + print(" %s;" % (recordConstructorExpr,)) def genDecodeProperties(c): def presentBin(fields): @@ -228,45 +235,45 @@ def genErl(spec): def writePropFieldLine(field): i = str(field.index) if field.domain == 'bit': - print " {F%s, R%s} = {P%s =/= 0, R%s}," % \ - (i, str(field.index + 1), i, i) + print(" {F%s, R%s} = {P%s =/= 0, R%s}," % \ + (i, str(field.index + 1), i, i)) else: - print " {F%s, R%s} = if P%s =:= 0 -> {undefined, R%s}; true -> ?%s_VAL(R%s, L%s, V%s, X%s) end," % \ - (i, str(field.index + 1), i, i, erlType(field.domain).upper(), i, i, i, i) + print(" {F%s, R%s} = if P%s =:= 0 -> {undefined, R%s}; true -> ?%s_VAL(R%s, L%s, V%s, X%s) end," % \ + (i, str(field.index + 1), i, i, erlType(field.domain).upper(), i, i, i, i)) if len(c.fields) == 0: - print "decode_properties(%d, <<>>) ->" % (c.index,) + print("decode_properties(%d, <<>>) ->" % (c.index,)) else: - print ("decode_properties(%d, %s) ->" % - (c.index, presentBin(c.fields))) + print(("decode_properties(%d, %s) ->" % + (c.index, presentBin(c.fields)))) for field in c.fields: writePropFieldLine(field) - print " <<>> = %s," % ('R' + str(len(c.fields))) - print " #'P_%s'{%s};" % 
(erlangize(c.name), fieldMapList(c.fields)) + print(" <<>> = %s," % ('R' + str(len(c.fields)))) + print(" #'P_%s'{%s};" % (erlangize(c.name), fieldMapList(c.fields))) def genFieldPreprocessing(packed): for f in packed: type = erlType(f.domain) if type == 'bit': - print " F%dBits = (%s)," % \ + print(" F%dBits = (%s)," % \ (f.index, ' bor '.join(['(bitvalue(F%d) bsl %d)' % (x.index, x.index - f.index) - for x in f.contents])) + for x in f.contents]))) elif type == 'table': - print " F%dTab = rabbit_binary_generator:generate_table(F%d)," % (f.index, f.index) - print " F%dLen = size(F%dTab)," % (f.index, f.index) + print(" F%dTab = rabbit_binary_generator:generate_table(F%d)," % (f.index, f.index)) + print(" F%dLen = size(F%dTab)," % (f.index, f.index)) elif type == 'shortstr': - print " F%dLen = shortstr_size(F%d)," % (f.index, f.index) + print(" F%dLen = shortstr_size(F%d)," % (f.index, f.index)) elif type == 'longstr': - print " F%dLen = size(F%d)," % (f.index, f.index) + print(" F%dLen = size(F%d)," % (f.index, f.index)) else: pass def genEncodeMethodFields(m): packedFields = packMethodFields(m.arguments) - print "encode_method_fields(#%s{%s}) ->" % (m.erlangName(), fieldMapList(m.arguments)) + print("encode_method_fields(#%s{%s}) ->" % (m.erlangName(), fieldMapList(m.arguments))) genFieldPreprocessing(packedFields) - print " <<%s>>;" % (', '.join([methodFieldFragment(f) for f in packedFields])) + print(" <<%s>>;" % (', '.join([methodFieldFragment(f) for f in packedFields]))) def genEncodeProperties(c): def presentBin(fields): @@ -275,21 +282,21 @@ def genErl(spec): def writePropFieldLine(field): i = str(field.index) if field.domain == 'bit': - print " {P%s, R%s} = {F%s =:= 1, R%s}," % \ - (i, str(field.index + 1), i, i) + print(" {P%s, R%s} = {F%s =:= 1, R%s}," % \ + (i, str(field.index + 1), i, i)) else: - print " {P%s, R%s} = if F%s =:= undefined -> {0, R%s}; true -> {1, [?%s_PROP(F%s, L%s) | R%s]} end," % \ - (i, str(field.index + 1), i, i, 
erlType(field.domain).upper(), i, i, i) + print(" {P%s, R%s} = if F%s =:= undefined -> {0, R%s}; true -> {1, [?%s_PROP(F%s, L%s) | R%s]} end," % \ + (i, str(field.index + 1), i, i, erlType(field.domain).upper(), i, i, i)) - print "encode_properties(#'P_%s'{%s}) ->" % (erlangize(c.name), fieldMapList(c.fields)) + print("encode_properties(#'P_%s'{%s}) ->" % (erlangize(c.name), fieldMapList(c.fields))) if len(c.fields) == 0: - print " <<>>;" + print(" <<>>;") else: - print " R0 = [<<>>]," + print(" R0 = [<<>>],") for field in c.fields: writePropFieldLine(field) - print " list_to_binary([%s | lists:reverse(R%s)]);" % \ - (presentBin(c.fields), str(len(c.fields))) + print(" list_to_binary([%s | lists:reverse(R%s)]);" % \ + (presentBin(c.fields), str(len(c.fields)))) def messageConstantClass(cls): # We do this because 0.8 uses "soft error" and 8.1 uses "soft-error". @@ -304,13 +311,13 @@ def genErl(spec): def genLookupException1(c,hardErrorBoolStr): n = erlangConstantName(c) - print 'lookup_amqp_exception(%s) -> {%s, ?%s, <<"%s">>};' % \ - (n.lower(), hardErrorBoolStr, n, n) + print('lookup_amqp_exception(%s) -> {%s, ?%s, <<"%s">>};' % \ + (n.lower(), hardErrorBoolStr, n, n)) def genAmqpException(c,v,cls): n = erlangConstantName(c) - print 'amqp_exception(?%s) -> %s;' % \ - (n, n.lower()) + print('amqp_exception(?%s) -> %s;' % \ + (n, n.lower())) methods = spec.allMethods() @@ -320,8 +327,8 @@ def genErl(spec): module = "%s_%d" % (module, spec.revision) if module == "rabbit_framing_amqp_8_0": module = "rabbit_framing_amqp_0_8" - print "-module(%s)." % module - print """-include("rabbit_framing.hrl"). + print("-module(%s)." % module) + print("""-include("rabbit_framing.hrl"). -export([version/0]). -export([lookup_method_name/1]). @@ -339,11 +346,11 @@ def genErl(spec): -export([lookup_amqp_exception/1]). -export([amqp_exception/1]). -""" - print "%% Various types" - print "-ifdef(use_specs)." 
+""") + print("%% Various types") + print("-ifdef(use_specs).") - print """-export_type([amqp_field_type/0, amqp_property_type/0, + print("""-export_type([amqp_field_type/0, amqp_property_type/0, amqp_table/0, amqp_array/0, amqp_value/0, amqp_method_name/0, amqp_method/0, amqp_method_record/0, amqp_method_field_name/0, amqp_property_record/0, @@ -351,6 +358,7 @@ def genErl(spec): -type(amqp_field_type() :: 'longstr' | 'signedint' | 'decimal' | 'timestamp' | + 'unsignedbyte' | 'unsignedshort' | 'unsignedint' | 'table' | 'byte' | 'double' | 'float' | 'long' | 'short' | 'bool' | 'binary' | 'void' | 'array'). -type(amqp_property_type() :: @@ -373,37 +381,38 @@ def genErl(spec): 'undefined' | % void non_neg_integer() % timestamp ). -""" +""") - print prettyType("amqp_method_name()", - [m.erlangName() for m in methods]) - print prettyType("amqp_method()", + print(prettyType("amqp_method_name()", + [m.erlangName() for m in methods])) + print(prettyType("amqp_method()", ["{%s, %s}" % (m.klass.index, m.index) for m in methods], - 6) - print prettyType("amqp_method_record()", - ["#%s{}" % (m.erlangName()) for m in methods]) + 6)) + print(prettyType("amqp_method_record()", + ["#%s{}" % (m.erlangName()) for m in methods])) fieldNames = set() for m in methods: - fieldNames.update(m.arguments) - fieldNames = [erlangize(f.name) for f in fieldNames] - print prettyType("amqp_method_field_name()", - fieldNames) - print prettyType("amqp_property_record()", - ["#'P_%s'{}" % erlangize(c.name) for c in spec.allClasses()]) - print prettyType("amqp_exception()", - ["'%s'" % erlangConstantName(c).lower() for (c, v, cls) in spec.constants]) - print prettyType("amqp_exception_code()", - ["%i" % v for (c, v, cls) in spec.constants]) + fieldNames.update([erlangize(f.name) for f in m.arguments]) + fieldNames = [f for f in fieldNames] + fieldNames.sort() + print(prettyType("amqp_method_field_name()", + fieldNames)) + print(prettyType("amqp_property_record()", + ["#'P_%s'{}" % erlangize(c.name) 
for c in spec.allClasses()])) + print(prettyType("amqp_exception()", + ["'%s'" % erlangConstantName(c).lower() for (c, v, cls) in spec.constants])) + print(prettyType("amqp_exception_code()", + ["%i" % v for (c, v, cls) in spec.constants])) classIds = set() for m in spec.allMethods(): classIds.add(m.klass.index) - print prettyType("amqp_class_id()", - ["%i" % ci for ci in classIds]) - print prettyType("amqp_class_name()", - ["%s" % c.erlangName() for c in spec.allClasses()]) - print "-endif. % use_specs" + print(prettyType("amqp_class_id()", + ["%i" % ci for ci in classIds])) + print(prettyType("amqp_class_name()", + ["%s" % c.erlangName() for c in spec.allClasses()])) + print("-endif. % use_specs") - print """ + print(""" %% Method signatures -ifdef(use_specs). -spec(version/0 :: () -> {non_neg_integer(), non_neg_integer(), non_neg_integer()}). @@ -506,52 +515,52 @@ shortstr_size(S) -> T = rabbit_binary_generator:generate_table(X), <<(size(T)):32, T/binary>> end). -""" +""") version = "{%d, %d, %d}" % (spec.major, spec.minor, spec.revision) if version == '{8, 0, 0}': version = '{0, 8, 0}' - print "version() -> %s." % (version) + print("version() -> %s." % (version)) for m in methods: genLookupMethodName(m) - print "lookup_method_name({_ClassId, _MethodId} = Id) -> exit({unknown_method_id, Id})." + print("lookup_method_name({_ClassId, _MethodId} = Id) -> exit({unknown_method_id, Id}).") for c in spec.allClasses(): genLookupClassName(c) - print "lookup_class_name(ClassId) -> exit({unknown_class_id, ClassId})." + print("lookup_class_name(ClassId) -> exit({unknown_class_id, ClassId}).") for m in methods: genMethodId(m) - print "method_id(Name) -> exit({unknown_method_name, Name})." + print("method_id(Name) -> exit({unknown_method_name, Name}).") for m in methods: genMethodHasContent(m) - print "method_has_content(Name) -> exit({unknown_method_name, Name})." 
+ print("method_has_content(Name) -> exit({unknown_method_name, Name}).") for m in methods: genMethodIsSynchronous(m) - print "is_method_synchronous(Name) -> exit({unknown_method_name, Name})." + print("is_method_synchronous(Name) -> exit({unknown_method_name, Name}).") for m in methods: genMethodRecord(m) - print "method_record(Name) -> exit({unknown_method_name, Name})." + print("method_record(Name) -> exit({unknown_method_name, Name}).") for m in methods: genMethodFieldNames(m) - print "method_fieldnames(Name) -> exit({unknown_method_name, Name})." + print("method_fieldnames(Name) -> exit({unknown_method_name, Name}).") for m in methods: genDecodeMethodFields(m) - print "decode_method_fields(Name, BinaryFields) ->" - print " rabbit_misc:frame_error(Name, BinaryFields)." + print("decode_method_fields(Name, BinaryFields) ->") + print(" rabbit_misc:frame_error(Name, BinaryFields).") for c in spec.allClasses(): genDecodeProperties(c) - print "decode_properties(ClassId, _BinaryFields) -> exit({unknown_class_id, ClassId})." + print("decode_properties(ClassId, _BinaryFields) -> exit({unknown_class_id, ClassId}).") for m in methods: genEncodeMethodFields(m) - print "encode_method_fields(Record) -> exit({unknown_method_name, element(1, Record)})." + print("encode_method_fields(Record) -> exit({unknown_method_name, element(1, Record)}).") for c in spec.allClasses(): genEncodeProperties(c) - print "encode_properties(Record) -> exit({unknown_properties_record, Record})." + print("encode_properties(Record) -> exit({unknown_properties_record, Record}).") for (c,v,cls) in spec.constants: genLookupException(c,v,cls) - print "lookup_amqp_exception(Code) ->" - print " rabbit_log:warning(\"Unknown AMQP error code '~p'~n\", [Code])," - print " {true, ?INTERNAL_ERROR, <<\"INTERNAL_ERROR\">>}." 
+ print("lookup_amqp_exception(Code) ->") + print(" rabbit_log:warning(\"Unknown AMQP error code '~p'~n\", [Code]),") + print(" {true, ?INTERNAL_ERROR, <<\"INTERNAL_ERROR\">>}.") for(c,v,cls) in spec.constants: genAmqpException(c,v,cls) - print "amqp_exception(_Code) -> undefined." + print("amqp_exception(_Code) -> undefined.") def genHrl(spec): def fieldNameList(fields): @@ -559,7 +568,7 @@ def genHrl(spec): def fieldNameListDefaults(fields): def fillField(field): - result = erlangize(f.name) + result = erlangize(field.name) if field.defaultvalue != None: conv_fn = erlangDefaultValueTypeConvMap[type(field.defaultvalue)] result += ' = ' + conv_fn(field.defaultvalue) @@ -569,18 +578,18 @@ def genHrl(spec): methods = spec.allMethods() printFileHeader() - print "-define(PROTOCOL_PORT, %d)." % (spec.port) + print("-define(PROTOCOL_PORT, %d)." % (spec.port)) for (c,v,cls) in spec.constants: - print "-define(%s, %s)." % (erlangConstantName(c), v) + print("-define(%s, %s)." % (erlangConstantName(c), v)) - print "%% Method field records." + print("%% Method field records.") for m in methods: - print "-record(%s, {%s})." % (m.erlangName(), fieldNameListDefaults(m.arguments)) + print("-record(%s, {%s})." % (m.erlangName(), fieldNameListDefaults(m.arguments))) - print "%% Class property records." + print("%% Class property records.") for c in spec.allClasses(): - print "-record('P_%s', {%s})." % (erlangize(c.name), fieldNameList(c.fields)) + print("-record('P_%s', {%s})." 
% (erlangize(c.name), fieldNameList(c.fields))) def generateErl(specPath): diff --git a/rabbitmq-server/deps/rabbit_common/erlang.mk b/rabbitmq-server/deps/rabbit_common/erlang.mk new file mode 100644 index 0000000..fc2d806 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/erlang.mk @@ -0,0 +1,6617 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c-dirty + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. 
+ +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... 
Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). 
+endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. 
+ +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch 
= git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. 
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += 
bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler
+pkg_dhtcrawler_fetch = git
+pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler
+pkg_dhtcrawler_commit = master
+
+PACKAGES += dirbusterl
+pkg_dirbusterl_name = dirbusterl
+pkg_dirbusterl_description = DirBuster successor in Erlang
+pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_fetch = git
+pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl
+pkg_dirbusterl_commit = master
+
+PACKAGES += dispcount
+pkg_dispcount_name = dispcount
+pkg_dispcount_description = Erlang task dispatcher based on ETS counters.
+pkg_dispcount_homepage = https://github.com/ferd/dispcount
+pkg_dispcount_fetch = git
+pkg_dispcount_repo = https://github.com/ferd/dispcount
+pkg_dispcount_commit = master
+
+PACKAGES += dlhttpc
+pkg_dlhttpc_name = dlhttpc
+pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints
+pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_fetch = git
+pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc
+pkg_dlhttpc_commit = master
+
+PACKAGES += dns
+pkg_dns_name = dns
+pkg_dns_description = Erlang DNS library
+pkg_dns_homepage = https://github.com/aetrion/dns_erlang
+pkg_dns_fetch = git
+pkg_dns_repo = https://github.com/aetrion/dns_erlang
+pkg_dns_commit = master
+
+PACKAGES += dnssd
+pkg_dnssd_name = dnssd
+pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation
+pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_fetch = git
+pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang
+pkg_dnssd_commit = master
+
+PACKAGES += dtl
+pkg_dtl_name = dtl
+pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang.
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simply writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. + +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. 
+ +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. + +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2014, Enrique Fernandez +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: eunit + +# Configuration + +EUNIT_OPTS ?= + +# Core targets. + +tests:: eunit + +help:: + $(verbose) printf "%s\n" "" \ + "EUnit targets:" \ + " eunit Run all the EUnit tests for this project" + +# Plugin-specific targets. 
+ +define eunit.erl + case "$(COVER)" of + "" -> ok; + _ -> + case cover:compile_beam_directory("ebin") of + {error, _} -> halt(1); + _ -> ok + end + end, + case eunit:test([$(call comma_list,$(1))], [$(EUNIT_OPTS)]) of + ok -> ok; + error -> halt(2) + end, + case "$(COVER)" of + "" -> ok; + _ -> + cover:export("eunit.coverdata") + end, + halt() +endef + +EUNIT_EBIN_MODS = $(notdir $(basename $(call core_find,ebin/,*.beam))) +EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.beam))) +EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \ + $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),{module,'$(mod)'}) + +eunit: test-build + $(gen_verbose) $(ERL) -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin ebin \ + -eval "$(subst $(newline),,$(subst ",\",$(call eunit.erl,$(EUNIT_MODS))))" + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. + +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. 
+ +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. + +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. 
+endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' 
> $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. +ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. 
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/include/rabbit.hrl b/rabbitmq-server/deps/rabbit_common/include/rabbit.hrl similarity index 79% rename from rabbitmq-server/include/rabbit.hrl rename to rabbitmq-server/deps/rabbit_common/include/rabbit.hrl index 5b90956..40193be 100644 --- a/rabbitmq-server/include/rabbit.hrl +++ b/rabbitmq-server/deps/rabbit_common/include/rabbit.hrl @@ -23,9 +23,18 @@ -record(auth_user, {username, tags, impl}). +%% Passed to authz backends. +-record(authz_socket_info, {sockname, peername}). %% Implementation for the internal auth backend --record(internal_user, {username, password_hash, tags}). +-record(internal_user, { + username, + password_hash, + tags, + %% password hashing implementation module, + %% typically rabbit_password_hashing_* but can + %% come from a plugin + hashing_algorithm}). -record(permission, {configure, write, read}). -record(user_vhost, {username, virtual_host}). -record(user_permission, {user_vhost, permission}). @@ -42,7 +51,13 @@ payload_fragments_rev %% list of binaries, in reverse order (!) }). --record(resource, {virtual_host, kind, name}). +-record(resource, { + virtual_host, + %% exchange, queue, ... + kind, + %% name as a binary + name +}). %% fields described as 'transient' here are cleared when writing to %% rabbit_durable_ @@ -84,11 +99,21 @@ -record(runtime_parameters, {key, value}). --record(basic_message, {exchange_name, routing_keys = [], content, id, - is_persistent}). 
+-record(basic_message, + {exchange_name, %% The exchange where the message was received + routing_keys = [], %% Routing keys used during publish + content, %% The message content + id, %% A `rabbit_guid:gen()` generated id + is_persistent}). %% Whether the message was published as persistent + +-record(delivery, + {mandatory, %% Whether the message was published as mandatory + confirm, %% Whether the message needs confirming + sender, %% The pid of the process that created the delivery + message, %% The #basic_message record + msg_seq_no, %% Msg Sequence Number from the channel publish_seqno field + flow}). %% Should flow control be used for this delivery --record(ssl_socket, {tcp, ssl}). --record(delivery, {mandatory, confirm, sender, message, msg_seq_no, flow}). -record(amqp_error, {name, explanation = "", method = none}). -record(event, {type, props, reference = undefined, timestamp}). @@ -104,9 +129,10 @@ %%---------------------------------------------------------------------------- --define(COPYRIGHT_MESSAGE, "Copyright (C) 2007-2015 Pivotal Software, Inc."). +-define(COPYRIGHT_MESSAGE, "Copyright (C) 2007-2016 Pivotal Software, Inc."). -define(INFORMATION_MESSAGE, "Licensed under the MPL. See http://www.rabbitmq.com/"). --define(ERTS_MINIMUM, "5.6.3"). +-define(OTP_MINIMUM, "R16B03"). +-define(ERTS_MINIMUM, "5.10.4"). %% EMPTY_FRAME_SIZE, 8 = 1 + 2 + 4 + 1 %% - 1 byte of frame type @@ -131,6 +157,10 @@ -define(ROUTING_HEADERS, [<<"CC">>, <<"BCC">>]). -define(DELETED_HEADER, <<"BCC">>). +-define(EXCHANGE_DELETE_IN_PROGRESS_COMPONENT, <<"exchange-delete-in-progress">>). + +-define(CHANNEL_OPERATION_TIMEOUT, rabbit_misc:get_channel_operation_timeout()). + %% Trying to send a term across a cluster larger than 2^31 bytes will %% cause the VM to exit with "Absurdly large distribution output data %% buffer". 
So we limit the max message size to 2^31 - 10^6 bytes (1MB diff --git a/rabbitmq-server/include/rabbit_msg_store.hrl b/rabbitmq-server/deps/rabbit_common/include/rabbit_msg_store.hrl similarity index 100% rename from rabbitmq-server/include/rabbit_msg_store.hrl rename to rabbitmq-server/deps/rabbit_common/include/rabbit_msg_store.hrl diff --git a/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-components.mk b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. +ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. 
+ +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = 
git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = 
git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples \ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. 
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. If this is not a Git clone, +# default to the upstream Git repository. 
+ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. +dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git 
checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. 
+# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) + +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . 
+ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-dist.mk b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-dist.mk new file mode 100644 index 0000000..52153c1 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-dist.mk @@ -0,0 +1,159 @@ +.PHONY: dist test-dist do-dist clean-dist + +DIST_DIR = plugins + +dist_verbose_0 = @echo " DIST " $@; +dist_verbose_2 = set -x; +dist_verbose = $(dist_verbose_$(V)) + +# We take the version of an Erlang application from the .app file. This +# macro is called like this: +# +# $(call get_app_version,/path/to/name.app.src) + +define get_app_version +$(shell awk ' +/{ *vsn *, *"/ { + vsn=$$0; + sub(/.*{ *vsn, *"/, "", vsn); + sub(/".*/, "", vsn); + print vsn; + exit; +}' $(1)) +endef + +# Our type specs rely on dict:dict/0 etc, which are only available in +# 17.0 upwards. +define compare_version +$(shell awk 'BEGIN { + split("$(1)", v1, "."); + version1 = v1[1] * 1000000 + v1[2] * 10000 + v1[3] * 100 + v1[4]; + + split("$(2)", v2, "."); + version2 = v2[1] * 1000000 + v2[2] * 10000 + v2[3] * 100 + v2[4]; + + if (version1 $(3) version2) { + print "true"; + } else { + print "false"; + } +}') +endef + +# Define the target to create an .ez plugin archive. 
This macro is +# called like this: +# +# $(call do_ez_target,app_name,app_version,app_dir) + +define do_ez_target +dist_$(1)_ez_dir = $$(if $(2),$(DIST_DIR)/$(1)-$(2),$$(if $$(VERSION),$(DIST_DIR)/$(1)-$$(VERSION),$(DIST_DIR)/$(1))) +dist_$(1)_ez = $$(dist_$(1)_ez_dir).ez + + +$$(dist_$(1)_ez): APP = $(1) +$$(dist_$(1)_ez): VSN = $(2) +$$(dist_$(1)_ez): SRC_DIR = $(3) +$$(dist_$(1)_ez): EZ_DIR = $$(abspath $$(dist_$(1)_ez_dir)) +$$(dist_$(1)_ez): EZ = $$(dist_$(1)_ez) +$$(dist_$(1)_ez): $$(if $$(wildcard $(3)/ebin $(3)/include $(3)/priv),\ + $$(call core_find,$$(wildcard $(3)/ebin $(3)/include $(3)/priv),*),) + +# If the application's Makefile defines a `list-dist-deps` target, we +# use it to populate the dependencies list. This is useful when the +# application has also a `prepare-dist` target to modify the created +# tree before we make an archive out of it. + +ifeq ($$(shell test -f $(3)/rabbitmq-components.mk \ + && grep -q '^list-dist-deps::' $(3)/Makefile && echo yes),yes) +$$(dist_$(1)_ez): $$(patsubst %,$(3)/%, \ + $$(shell $(MAKE) --no-print-directory -C $(3) list-dist-deps \ + APP=$(1) VSN=$(2) EZ_DIR=$$(abspath $$(dist_$(1)_ez_dir)))) +endif + +DIST_EZS += $$(dist_$(1)_ez) + +endef + +# Real entry point: it tests the existence of an .app file to determine +# if it is an Erlang application (and therefore if it should be provided +# as an .ez plugin archive). Then, if calls do_ez_target. 
It should be +# called as: +# +# $(call ez_target,app_name) + +define ez_target +dist_$(1)_appdir = $$(if $$(filter $(PROJECT),$(1)),$(CURDIR),$(DEPS_DIR)/$(1)) +dist_$(1)_appfile = $$(dist_$(1)_appdir)/ebin/$(1).app + +$$(if $$(shell test -f $$(dist_$(1)_appfile) && echo OK), \ + $$(eval $$(call do_ez_target,$(1),$$(call get_app_version,$$(dist_$(1)_appfile)),$$(dist_$(1)_appdir)))) + +endef + +ifneq ($(filter do-dist,$(MAKECMDGOALS)),) +# The following code is evaluated only when running "make do-dist", +# otherwise it would trigger an infinite loop, as this code calls "make +# list-dist-deps" (see do_ez_target). +ifdef DIST_PLUGINS_LIST +# Now, try to create an .ez target for the top-level project and all +# dependencies. + +ifeq ($(wildcard $(DIST_PLUGINS_LIST)),) +$(error DIST_PLUGINS_LIST ($(DIST_PLUGINS_LIST)) is missing) +endif + +$(eval $(foreach app, \ + $(filter-out rabbit,$(sort $(notdir $(shell cat $(DIST_PLUGINS_LIST)))) $(PROJECT)), \ + $(call ez_target,$(app)))) +endif +endif + +# The actual recipe to create the .ez plugin archive. Some variables are +# defined in the do_ez_target macro above. All .ez archives are also +# listed in this do_ez_target macro. + +RSYNC ?= rsync +RSYNC_V_0 = +RSYNC_V_1 = -v +RSYNC_V = $(RSYNC_V_$(V)) + +ZIP ?= zip +ZIP_V_0 = -q +ZIP_V_1 = +ZIP_V = $(ZIP_V_$(V)) + +$(DIST_DIR)/%.ez: + $(verbose) rm -rf $(EZ_DIR) $(EZ) + $(verbose) mkdir -p $(EZ_DIR) + $(dist_verbose) $(RSYNC) -a $(RSYNC_V) \ + --include '/ebin/***' \ + --include '/include/***' \ + --include '/priv/***' \ + --exclude '*' \ + $(SRC_DIR)/ $(EZ_DIR)/ + @# Give a chance to the application to make any modification it + @# wants to the tree before we make an archive. + $(verbose) ! 
(test -f $(SRC_DIR)/rabbitmq-components.mk \ + && grep -q '^prepare-dist::' $(SRC_DIR)/Makefile) || \ + $(MAKE) --no-print-directory -C $(SRC_DIR) prepare-dist \ + APP=$(APP) VSN=$(VSN) EZ_DIR=$(EZ_DIR) + $(verbose) (cd $(DIST_DIR) && $(ZIP) $(ZIP_V) -r $*.ez $*) + $(verbose) rm -rf $(EZ_DIR) + +# We need to recurse because the top-level make instance is evaluated +# before dependencies are downloaded. + +dist:: $(ERLANG_MK_RECURSIVE_DEPS_LIST) all + $(gen_verbose) $(MAKE) do-dist DIST_PLUGINS_LIST=$(ERLANG_MK_RECURSIVE_DEPS_LIST) + +test-dist:: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) test-build + $(gen_verbose) $(MAKE) do-dist DIST_PLUGINS_LIST=$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) + +do-dist:: $(DIST_EZS) + $(verbose) unwanted='$(filter-out $(DIST_EZS),$(wildcard $(DIST_DIR)/*.ez))'; \ + test -z "$$unwanted" || (echo " RM $$unwanted" && rm -f $$unwanted) + +clean-dist:: + $(gen_verbose) rm -rf $(DIST_DIR) + +clean:: clean-dist diff --git a/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-plugin.mk b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-plugin.mk new file mode 100644 index 0000000..6b0e5e5 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-plugin.mk @@ -0,0 +1,15 @@ +ifeq ($(filter rabbitmq-dist.mk,$(notdir $(MAKEFILE_LIST))),) +include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-dist.mk +endif + +ifeq ($(filter rabbitmq-run.mk,$(notdir $(MAKEFILE_LIST))),) +include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-run.mk +endif + +ifeq ($(filter rabbitmq-tests.mk,$(notdir $(MAKEFILE_LIST))),) +include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-tests.mk +endif + +ifeq ($(filter rabbitmq-tools.mk,$(notdir $(MAKEFILE_LIST))),) +include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-tools.mk +endif diff --git a/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-run.mk b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-run.mk new file mode 100644 index 0000000..72ce8b0 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-run.mk @@ -0,0 +1,280 @@ 
+.PHONY: run-broker run-background-broker run-node run-background-node \ + run-tests run-lazy-vq-tests run-qc \ + start-background-node start-rabbit-on-node \ + stop-rabbit-on-node set-resource-alarm clear-resource-alarm \ + stop-node clean-node-db start-cover stop-cover + +ifeq ($(filter rabbitmq-dist.mk,$(notdir $(MAKEFILE_LIST))),) +include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-dist.mk +endif + +exec_verbose_0 = @echo " EXEC " $@; +exec_verbose_2 = set -x; +exec_verbose = $(exec_verbose_$(V)) + +ifeq ($(PLATFORM),msys2) +TEST_TMPDIR ?= $(TEMP)/rabbitmq-test-instances +else +TMPDIR ?= /tmp +TEST_TMPDIR ?= $(TMPDIR)/rabbitmq-test-instances +endif + +# Location of the scripts controlling the broker. +ifeq ($(PROJECT),rabbit) +RABBITMQ_BROKER_DIR ?= $(CURDIR) +else +RABBITMQ_BROKER_DIR ?= $(DEPS_DIR)/rabbit +endif +RABBITMQ_SCRIPTS_DIR ?= $(RABBITMQ_BROKER_DIR)/scripts + +ifeq ($(PLATFORM),msys2) +RABBITMQ_PLUGINS ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmq-plugins.bat +RABBITMQ_SERVER ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmq-server.bat +RABBITMQCTL ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmqctl.bat +else +RABBITMQ_PLUGINS ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmq-plugins +RABBITMQ_SERVER ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmq-server +RABBITMQCTL ?= $(RABBITMQ_SCRIPTS_DIR)/rabbitmqctl +endif + +export RABBITMQ_SCRIPTS_DIR RABBITMQCTL RABBITMQ_PLUGINS RABBITMQ_SERVER + +# We need to pass the location of codegen to the Java client ant +# process. 
+CODEGEN_DIR = $(DEPS_DIR)/rabbitmq_codegen +PYTHONPATH = $(CODEGEN_DIR) +export PYTHONPATH + +ANT ?= ant +ANT_FLAGS += -Dmake.bin=$(MAKE) \ + -DUMBRELLA_AVAILABLE=true \ + -Drabbitmqctl.bin=$(RABBITMQCTL) \ + -Dsibling.codegen.dir=$(CODEGEN_DIR) +ifeq ($(PROJECT),rabbitmq_test) +ANT_FLAGS += -Dsibling.rabbitmq_test.dir=$(CURDIR) +else +ANT_FLAGS += -Dsibling.rabbitmq_test.dir=$(DEPS_DIR)/rabbitmq_test +endif +export ANT ANT_FLAGS + +node_tmpdir = $(TEST_TMPDIR)/$(1) +node_pid_file = $(call node_tmpdir,$(1))/$(1).pid +node_log_base = $(call node_tmpdir,$(1))/log +node_mnesia_base = $(call node_tmpdir,$(1))/mnesia +node_plugins_expand_dir = $(call node_tmpdir,$(1))/plugins +node_enabled_plugins_file = $(call node_tmpdir,$(1))/enabled_plugins + +# Broker startup variables for the test environment. +RABBITMQ_NODENAME ?= rabbit +NODE_TMPDIR ?= $(call node_tmpdir,$(RABBITMQ_NODENAME)) + +RABBITMQ_PID_FILE ?= $(call node_pid_file,$(RABBITMQ_NODENAME)) +RABBITMQ_LOG_BASE ?= $(call node_log_base,$(RABBITMQ_NODENAME)) +RABBITMQ_MNESIA_BASE ?= $(call node_mnesia_base,$(RABBITMQ_NODENAME)) +RABBITMQ_PLUGINS_EXPAND_DIR ?= $(call node_plugins_expand_dir,$(RABBITMQ_NODENAME)) +RABBITMQ_ENABLED_PLUGINS_FILE ?= $(call node_enabled_plugins_file,$(RABBITMQ_NODENAME)) + +# erlang.mk adds dependencies' ebin directory to ERL_LIBS. This is +# a sane default, but we prefer to rely on the .ez archives in the +# `plugins` directory so the plugin code is executed. The `plugins` +# directory is added to ERL_LIBS by rabbitmq-env. 
+DIST_ERL_LIBS = $(shell echo "$(filter-out $(DEPS_DIR),$(subst :, ,$(ERL_LIBS)))" | tr ' ' :) + +define basic_script_env_settings +MAKE="$(MAKE)" \ +ERL_LIBS="$(DIST_ERL_LIBS)" \ +RABBITMQ_NODENAME="$(1)" \ +RABBITMQ_NODE_IP_ADDRESS="$(RABBITMQ_NODE_IP_ADDRESS)" \ +RABBITMQ_NODE_PORT="$(2)" \ +RABBITMQ_PID_FILE="$(call node_pid_file,$(1))" \ +RABBITMQ_LOG_BASE="$(call node_log_base,$(1))" \ +RABBITMQ_MNESIA_BASE="$(call node_mnesia_base,$(1))" \ +RABBITMQ_PLUGINS_DIR="$(CURDIR)/$(DIST_DIR)" \ +RABBITMQ_PLUGINS_EXPAND_DIR="$(call node_plugins_expand_dir,$(1))" \ +RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS)" +endef + +BASIC_SCRIPT_ENV_SETTINGS = $(call basic_script_env_settings,$(RABBITMQ_NODENAME),$(RABBITMQ_NODE_PORT)) \ + RABBITMQ_ENABLED_PLUGINS_FILE="$(RABBITMQ_ENABLED_PLUGINS_FILE)" + +# NOTE: Running a plugin requires RabbitMQ itself. As this file is +# loaded *after* erlang.mk, it is too late to add "rabbit" to the +# dependencies. Therefore, this is done in rabbitmq-components.mk. +# +# rabbitmq-components.mk knows the list of targets which starts +# a broker. When we add a target here, it needs to be listed in +# rabbitmq-components.mk as well. +# +# FIXME: This is fragile, how can we fix this? 
+ +ERL_CALL ?= erl_call +ERL_CALL_OPTS ?= -sname $(RABBITMQ_NODENAME) -e + +test-tmpdir: + $(verbose) mkdir -p $(TEST_TMPDIR) + +virgin-test-tmpdir: + $(gen_verbose) rm -rf $(TEST_TMPDIR) + $(verbose) mkdir -p $(TEST_TMPDIR) + +node-tmpdir: + $(verbose) mkdir -p $(RABBITMQ_LOG_BASE) \ + $(RABBITMQ_MNESIA_BASE) \ + $(RABBITMQ_PLUGINS_EXPAND_DIR) + +virgin-node-tmpdir: + $(gen_verbose) rm -rf $(NODE_TMPDIR) + $(verbose) mkdir -p $(RABBITMQ_LOG_BASE) \ + $(RABBITMQ_MNESIA_BASE) \ + $(RABBITMQ_PLUGINS_EXPAND_DIR) + +.PHONY: test-tmpdir virgin-test-tmpdir node-tmpdir virgin-node-tmpdir + +ifeq ($(wildcard ebin/test),) +$(RABBITMQ_ENABLED_PLUGINS_FILE): dist +endif + +$(RABBITMQ_ENABLED_PLUGINS_FILE): node-tmpdir + $(verbose) rm -f $@ + $(gen_verbose) $(BASIC_SCRIPT_ENV_SETTINGS) \ + $(RABBITMQ_PLUGINS) set --offline \ + $$($(BASIC_SCRIPT_ENV_SETTINGS) $(RABBITMQ_PLUGINS) list -m | tr '\n' ' ') + +# -------------------------------------------------------------------- +# Run a full RabbitMQ. +# -------------------------------------------------------------------- + +run-broker: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE) + $(BASIC_SCRIPT_ENV_SETTINGS) \ + RABBITMQ_ALLOW_INPUT=true \ + $(RABBITMQ_SERVER) + +run-background-broker: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE) + $(BASIC_SCRIPT_ENV_SETTINGS) \ + $(RABBITMQ_SERVER) -detached + +# -------------------------------------------------------------------- +# Run a bare Erlang node. +# -------------------------------------------------------------------- + +run-node: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE) + $(BASIC_SCRIPT_ENV_SETTINGS) \ + RABBITMQ_NODE_ONLY=true \ + RABBITMQ_ALLOW_INPUT=true \ + $(RABBITMQ_SERVER) + +run-background-node: virgin-node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE) + $(BASIC_SCRIPT_ENV_SETTINGS) \ + RABBITMQ_NODE_ONLY=true \ + $(RABBITMQ_SERVER) -detached + +# -------------------------------------------------------------------- +# Used by rabbitmq-test. 
+# -------------------------------------------------------------------- + +# TODO: Move this to rabbitmq-tests. +run-tests: + $(verbose) echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, true\}$$/d' + $(verbose) echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) $(ERL_CALL_OPTS) -n hare | sed -E '/^\{ok, true\}$$/d' + OUT=$$(RABBITMQ_PID_FILE='$(RABBITMQ_PID_FILE)' \ + echo "rabbit_tests:all_tests()." | $(ERL_CALL) $(ERL_CALL_OPTS)) ; \ + echo $$OUT ; echo $$OUT | grep '^{ok, passed}$$' > /dev/null + +run-lazy-vq-tests: + $(verbose) echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, true\}$$/d' + $(verbose) echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) $(ERL_CALL_OPTS) -n hare | sed -E '/^\{ok, true\}$$/d' + OUT=$$(RABBITMQ_PID_FILE='$(RABBITMQ_PID_FILE)' \ + echo "rabbit_tests:test_lazy_variable_queue()." | $(ERL_CALL) $(ERL_CALL_OPTS)) ; \ + echo $$OUT ; echo $$OUT | grep '^{ok, passed}$$' > /dev/null + +run-qc: + echo 'code:add_path("$(TEST_EBIN_DIR)").' | $(ERL_CALL) $(ERL_CALL_OPTS) + ./quickcheck $(RABBITMQ_NODENAME) rabbit_backing_queue_qc 100 40 + ./quickcheck $(RABBITMQ_NODENAME) gm_qc 1000 200 + +ifneq ($(LOG_TO_STDIO),yes) +REDIRECT_STDIO = > $(RABBITMQ_LOG_BASE)/startup_log \ + 2> $(RABBITMQ_LOG_BASE)/startup_err +endif + +start-background-node: node-tmpdir $(RABBITMQ_ENABLED_PLUGINS_FILE) + $(BASIC_SCRIPT_ENV_SETTINGS) \ + RABBITMQ_NODE_ONLY=true \ + $(RABBITMQ_SERVER) \ + $(REDIRECT_STDIO) & + ERL_LIBS="$(DIST_ERL_LIBS)" \ + $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_PID_FILE) kernel + +start-rabbit-on-node: + $(exec_verbose) echo 'rabbit:start().' | $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, ok\}$$/d' + $(verbose) ERL_LIBS="$(DIST_ERL_LIBS)" \ + $(RABBITMQCTL) -n $(RABBITMQ_NODENAME) wait $(RABBITMQ_PID_FILE) + +stop-rabbit-on-node: + $(exec_verbose) echo 'rabbit:stop().' 
| $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, ok\}$$/d' + +set-resource-alarm: + $(exec_verbose) echo 'rabbit_alarm:set_alarm({{resource_limit, $(SOURCE), node()}, []}).' | \ + $(ERL_CALL) $(ERL_CALL_OPTS) + +clear-resource-alarm: + $(exec-verbose) echo 'rabbit_alarm:clear_alarm({resource_limit, $(SOURCE), node()}).' | \ + $(ERL_CALL) $(ERL_CALL_OPTS) + +stop-node: + $(exec_verbose) ( \ + pid=$$(test -f $(RABBITMQ_PID_FILE) && cat $(RABBITMQ_PID_FILE)) && \ + $(ERL_CALL) $(ERL_CALL_OPTS) -q && \ + while ps -p "$$pid" >/dev/null 2>&1; do sleep 1; done \ + ) || : + +clean-node-db: + $(exec_verbose) rm -rf $(RABBITMQ_MNESIA_BASE)/$(RABBITMQ_NODENAME)/* + +start-cover: + $(exec_verbose) echo "rabbit_misc:start_cover([\"rabbit\", \"hare\"])." | $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, ok\}$$/d' + $(verbose) echo "rabbit_misc:enable_cover([\"$(RABBITMQ_BROKER_DIR)\"])." | $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, ok\}$$/d' + +stop-cover: + $(exec_verbose) echo "rabbit_misc:report_cover(), cover:stop()." 
| $(ERL_CALL) $(ERL_CALL_OPTS) | sed -E '/^\{ok, ok\}$$/d' + $(verbose) cat cover/summary.txt + +.PHONY: other-node-tmpdir virgin-other-node-tmpdir start-other-node \ + cluster-other-node reset-other-node stop-other-node + +other-node-tmpdir: + $(verbose) mkdir -p $(call node_log_base,$(OTHER_NODE)) \ + $(call node_mnesia_base,$(OTHER_NODE)) \ + $(call node_plugins_expand_dir,$(OTHER_NODE)) + +virgin-other-node-tmpdir: + $(exec_verbose) rm -rf $(call node_tmpdir,$(OTHER_NODE)) + $(verbose) mkdir -p $(call node_log_base,$(OTHER_NODE)) \ + $(call node_mnesia_base,$(OTHER_NODE)) \ + $(call node_plugins_expand_dir,$(OTHER_NODE)) + +start-other-node: other-node-tmpdir + $(exec_verbose) $(call basic_script_env_settings,$(OTHER_NODE),$(OTHER_PORT)) \ + RABBITMQ_ENABLED_PLUGINS_FILE="$(if $(OTHER_PLUGINS),$(OTHER_PLUGINS),$($(call node_enabled_plugins_file,$(OTHER_NODE))))" \ + RABBITMQ_CONFIG_FILE="$(CURDIR)/etc/$(if $(OTHER_CONFIG),$(OTHER_CONFIG),$(OTHER_NODE))" \ + RABBITMQ_NODE_ONLY='' \ + $(RABBITMQ_SERVER) \ + > $(call node_log_base,$(OTHER_NODE))/startup_log \ + 2> $(call node_log_base,$(OTHER_NODE))/startup_err & + $(verbose) $(RABBITMQCTL) -n $(OTHER_NODE) wait \ + $(call node_pid_file,$(OTHER_NODE)) + +cluster-other-node: + $(exec_verbose) $(RABBITMQCTL) -n $(OTHER_NODE) stop_app + $(verbose) $(RABBITMQCTL) -n $(OTHER_NODE) reset + $(verbose) $(RABBITMQCTL) -n $(OTHER_NODE) join_cluster \ + $(if $(MAIN_NODE),$(MAIN_NODE),$(RABBITMQ_NODENAME)@$$(hostname -s)) + $(verbose) $(RABBITMQCTL) -n $(OTHER_NODE) start_app + +reset-other-node: + $(exec_verbose) $(RABBITMQCTL) -n $(OTHER_NODE) stop_app + $(verbose) $(RABBITMQCTL) -n $(OTHER_NODE) reset + $(verbose) $(RABBITMQCTL) -n $(OTHER_NODE) start_app + +stop-other-node: + $(exec_verbose) $(RABBITMQCTL) -n $(OTHER_NODE) stop diff --git a/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-tests.mk b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-tests.mk new file mode 100644 index 0000000..c61252c --- /dev/null +++ 
b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-tests.mk @@ -0,0 +1,86 @@ +.PHONY: tests-with-broker standalone-tests + +ifeq ($(filter rabbitmq-run.mk,$(notdir $(MAKEFILE_LIST))),) +include $(dir $(lastword $(MAKEFILE_LIST)))rabbitmq-run.mk +endif + +test_verbose_0 = @echo " TEST " $@; +test_verbose_2 = set -x; +test_verbose = $(test_verbose_$(V)) + +TEST_BEAM_DIRS = $(CURDIR)/test \ + $(DEPS_DIR)/rabbitmq_test/ebin + +pre-standalone-tests:: virgin-test-tmpdir + +tests:: tests-with-broker standalone-tests + +tests-with-broker: pre-standalone-tests test-dist + $(verbose) rm -f $(TEST_TMPDIR)/.passed + $(verbose) $(MAKE) start-background-node \ + RABBITMQ_SERVER_START_ARGS='$(patsubst %, -pa %,$(TEST_BEAM_DIRS))' \ + $(WITH_BROKER_TEST_MAKEVARS) \ + LOG_TO_STDIO=yes + $(verbose) $(MAKE) start-rabbit-on-node + -$(exec_verbose) echo > $(TEST_TMPDIR)/test-output && \ + if $(foreach SCRIPT,$(WITH_BROKER_SETUP_SCRIPTS), \ + MAKE='$(MAKE)' \ + DEPS_DIR='$(DEPS_DIR)' \ + NODE_TMPDIR='$(NODE_TMPDIR)' \ + RABBITMQCTL='$(RABBITMQCTL)' \ + RABBITMQ_NODENAME='$(RABBITMQ_NODENAME)' \ + $(WITH_BROKER_TEST_ENVVARS) \ + $(SCRIPT) &&) \ + $(foreach CMD,$(WITH_BROKER_TEST_COMMANDS), \ + echo >> $(TEST_TMPDIR)/test-output && \ + echo "$(CMD)." \ + | tee -a $(TEST_TMPDIR)/test-output \ + | $(ERL_CALL) $(ERL_CALL_OPTS) \ + | tee -a $(TEST_TMPDIR)/test-output \ + | egrep '{ok, (ok|passed)}' >/dev/null &&) \ + $(foreach SCRIPT,$(WITH_BROKER_TEST_SCRIPTS), \ + MAKE='$(MAKE)' \ + DEPS_DIR='$(DEPS_DIR)' \ + NODE_TMPDIR='$(NODE_TMPDIR)' \ + RABBITMQCTL='$(RABBITMQCTL)' \ + RABBITMQ_NODENAME='$(RABBITMQ_NODENAME)' \ + $(WITH_BROKER_TEST_ENVVARS) \ + $(SCRIPT) &&) : ; \ + then \ + touch $(TEST_TMPDIR)/.passed ; \ + fi + $(verbose) if test -f $(TEST_TMPDIR)/.passed; then \ + printf "\nPASSED\n" ; \ + else \ + cat $(TEST_TMPDIR)/test-output ; \ + printf "\n\nFAILED\n" ; \ + fi + $(verbose) sleep 1 + $(verbose) echo 'rabbit_misc:report_cover(), init:stop().' 
| $(ERL_CALL) $(ERL_CALL_OPTS) >/dev/null + $(verbose) sleep 1 + $(verbose) test -f $(TEST_TMPDIR)/.passed + +standalone-tests: pre-standalone-tests test-dist + $(exec_verbose) $(if $(STANDALONE_TEST_COMMANDS), \ + $(foreach CMD,$(STANDALONE_TEST_COMMANDS), \ + MAKE='$(MAKE)' \ + DEPS_DIR='$(DEPS_DIR)' \ + TEST_TMPDIR='$(TEST_TMPDIR)' \ + RABBITMQCTL='$(RABBITMQCTL)' \ + ERL_LIBS='$(CURDIR)/$(DIST_DIR):$(DIST_ERL_LIBS)' \ + $(ERL) $(ERL_OPTS) $(patsubst %,-pa %,$(TEST_BEAM_DIRS)) \ + -sname standalone_test \ + -eval "init:stop(case $(CMD) of ok -> 0; passed -> 0; _Else -> 1 end)" && \ + ) \ + :) + $(verbose) $(if $(STANDALONE_TEST_SCRIPTS), \ + $(foreach SCRIPT,$(STANDALONE_TEST_SCRIPTS), \ + MAKE='$(MAKE)' \ + DEPS_DIR='$(DEPS_DIR)' \ + TEST_TMPDIR='$(TEST_TMPDIR)' \ + RABBITMQCTL='$(RABBITMQCTL)' \ + $(SCRIPT) &&) :) + +# Add an alias for the old `make test` target. +.PHONY: test +test: tests diff --git a/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-tools.mk b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-tools.mk new file mode 100644 index 0000000..06e4048 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/mk/rabbitmq-tools.mk @@ -0,0 +1,63 @@ +READY_DEPS = $(foreach DEP,\ + $(filter $(RABBITMQ_COMPONENTS),$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)), \ + $(if $(wildcard $(DEPS_DIR)/$(DEP)),$(DEP),)) + +.PHONY: update-erlang-mk update-rabbitmq-components.mk + +update-erlang-mk: erlang-mk + $(verbose) if test "$(DO_COMMIT)" = 'yes'; then \ + git diff --quiet -- erlang.mk \ + || git commit -m 'Update erlang.mk' -- erlang.mk; \ + fi + $(verbose) for repo in $(READY_DEPS:%=$(DEPS_DIR)/%); do \ + ! test -f $$repo/erlang.mk \ + || $(MAKE) -C $$repo erlang-mk; \ + if test "$(DO_COMMIT)" = 'yes'; then \ + (cd $$repo; \ + git diff --quiet -- erlang.mk \ + || git commit -m 'Update erlang.mk' -- erlang.mk); \ + fi; \ + done + +update-rabbitmq-components-mk: rabbitmq-components-mk + $(verbose) for repo in $(READY_DEPS:%=$(DEPS_DIR)/%); do \ + ! 
test -f $$repo/rabbitmq-components.mk \ + || $(MAKE) -C $$repo rabbitmq-components-mk; \ + done + +ifneq ($(wildcard .git),) + +.PHONY: sync-gitremote sync-gituser + +sync-gitremote: $(READY_DEPS:%=$(DEPS_DIR)/%+sync-gitremote) + @: + +%+sync-gitremote: + $(exec_verbose) cd $* && \ + git remote set-url origin \ + '$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(notdir $*))' + $(verbose) cd $* && \ + git remote set-url --push origin \ + '$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(notdir $*))' + +RMQ_GIT_GLOBAL_USER_NAME := $(shell git config --global user.name) +RMQ_GIT_GLOBAL_USER_EMAIL := $(shell git config --global user.email) +RMQ_GIT_USER_NAME := $(shell git config user.name) +RMQ_GIT_USER_EMAIL := $(shell git config user.email) + +sync-gituser: $(READY_DEPS:%=$(DEPS_DIR)/%+sync-gituser) + @: + +%+sync-gituser: +ifeq ($(RMQ_GIT_USER_NAME),$(RMQ_GIT_GLOBAL_USER_NAME)) + $(exec_verbose) cd $* && git config --unset user.name || : +else + $(exec_verbose) cd $* && git config user.name "$(RMQ_GIT_USER_NAME)" +endif +ifeq ($(RMQ_GIT_USER_EMAIL),$(RMQ_GIT_GLOBAL_USER_EMAIL)) + $(verbose) cd $* && git config --unset user.email || : +else + $(verbose) cd $* && git config user.email "$(RMQ_GIT_USER_EMAIL)" +endif + +endif # ($(wildcard .git),) diff --git a/rabbitmq-server/src/app_utils.erl b/rabbitmq-server/deps/rabbit_common/src/app_utils.erl similarity index 98% rename from rabbitmq-server/src/app_utils.erl rename to rabbitmq-server/deps/rabbit_common/src/app_utils.erl index bab327e..748f285 100644 --- a/rabbitmq-server/src/app_utils.erl +++ b/rabbitmq-server/deps/rabbit_common/src/app_utils.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(app_utils). 
diff --git a/rabbitmq-server/src/credit_flow.erl b/rabbitmq-server/deps/rabbit_common/src/credit_flow.erl similarity index 81% rename from rabbitmq-server/src/credit_flow.erl rename to rabbitmq-server/deps/rabbit_common/src/credit_flow.erl index b9547cf..029c55f 100644 --- a/rabbitmq-server/src/credit_flow.erl +++ b/rabbitmq-server/deps/rabbit_common/src/credit_flow.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(credit_flow). @@ -103,6 +103,26 @@ %% flow". -define(STATE_CHANGE_INTERVAL, 1000000). +-ifdef(CREDIT_FLOW_TRACING). +-define(TRACE_BLOCKED(SELF, FROM), rabbit_event:notify(credit_flow_blocked, + [{process, SELF}, + {process_info, erlang:process_info(SELF)}, + {from, FROM}, + {from_info, erlang:process_info(FROM)}, + {timestamp, + time_compat:os_system_time( + milliseconds)}])). +-define(TRACE_UNBLOCKED(SELF, FROM), rabbit_event:notify(credit_flow_unblocked, + [{process, SELF}, + {from, FROM}, + {timestamp, + time_compat:os_system_time( + milliseconds)}])). +-else. +-define(TRACE_BLOCKED(SELF, FROM), ok). +-define(TRACE_UNBLOCKED(SELF, FROM), ok). +-endif. + %%---------------------------------------------------------------------------- %% There are two "flows" here; of messages and of credit, going in @@ -151,7 +171,10 @@ state() -> case blocked() of true -> flow; false -> case get(credit_blocked_at) of undefined -> running; - B -> Diff = timer:now_diff(erlang:now(), B), + B -> Now = time_compat:monotonic_time(), + Diff = time_compat:convert_time_unit(Now - B, + native, + micro_seconds), case Diff < ?STATE_CHANGE_INTERVAL of true -> flow; false -> running @@ -178,18 +201,21 @@ grant(To, Quantity) -> end. 
block(From) -> + ?TRACE_BLOCKED(self(), From), case blocked() of - false -> put(credit_blocked_at, erlang:now()); + false -> put(credit_blocked_at, time_compat:monotonic_time()); true -> ok end, ?UPDATE(credit_blocked, [], Blocks, [From | Blocks]). unblock(From) -> + ?TRACE_UNBLOCKED(self(), From), ?UPDATE(credit_blocked, [], Blocks, Blocks -- [From]), case blocked() of false -> case erase(credit_deferred) of undefined -> ok; - Credits -> [To ! Msg || {To, Msg} <- Credits] + Credits -> _ = [To ! Msg || {To, Msg} <- Credits], + ok end; true -> ok end. diff --git a/rabbitmq-server/src/gen_server2.erl b/rabbitmq-server/deps/rabbit_common/src/gen_server2.erl similarity index 98% rename from rabbitmq-server/src/gen_server2.erl rename to rabbitmq-server/deps/rabbit_common/src/gen_server2.erl index ffc075d..2349439 100644 --- a/rabbitmq-server/src/gen_server2.erl +++ b/rabbitmq-server/deps/rabbit_common/src/gen_server2.erl @@ -624,7 +624,10 @@ unregister_name(_Name) -> ok. extend_backoff(undefined) -> undefined; extend_backoff({backoff, InitialTimeout, MinimumTimeout, DesiredHibPeriod}) -> - {backoff, InitialTimeout, MinimumTimeout, DesiredHibPeriod, now()}. + {backoff, InitialTimeout, MinimumTimeout, DesiredHibPeriod, + {erlang:phash2([node()]), + time_compat:monotonic_time(), + time_compat:unique_integer()}}. %%%======================================================================== %%% Internal functions @@ -695,7 +698,9 @@ wake_hib(GS2State = #gs2_state { timeout_state = TS }) -> undefined -> undefined; {SleptAt, TimeoutState} -> - adjust_timeout_state(SleptAt, now(), TimeoutState) + adjust_timeout_state(SleptAt, + time_compat:monotonic_time(), + TimeoutState) end, post_hibernate( drain(GS2State #gs2_state { timeout_state = TimeoutState1 })). 
@@ -703,7 +708,8 @@ wake_hib(GS2State = #gs2_state { timeout_state = TS }) -> hibernate(GS2State = #gs2_state { timeout_state = TimeoutState }) -> TS = case TimeoutState of undefined -> undefined; - {backoff, _, _, _, _} -> {now(), TimeoutState} + {backoff, _, _, _, _} -> {time_compat:monotonic_time(), + TimeoutState} end, proc_lib:hibernate(?MODULE, wake_hib, [GS2State #gs2_state { timeout_state = TS }]). @@ -748,7 +754,8 @@ post_hibernate(GS2State = #gs2_state { state = State, adjust_timeout_state(SleptAt, AwokeAt, {backoff, CurrentTO, MinimumTO, DesiredHibPeriod, RandomState}) -> - NapLengthMicros = timer:now_diff(AwokeAt, SleptAt), + NapLengthMicros = time_compat:convert_time_unit(AwokeAt - SleptAt, + native, micro_seconds), CurrentMicros = CurrentTO * 1000, MinimumMicros = MinimumTO * 1000, DesiredHibMicros = DesiredHibPeriod * 1000, diff --git a/rabbitmq-server/src/mirrored_supervisor.erl b/rabbitmq-server/deps/rabbit_common/src/mirrored_supervisor.erl similarity index 98% rename from rabbitmq-server/src/mirrored_supervisor.erl rename to rabbitmq-server/deps/rabbit_common/src/mirrored_supervisor.erl index 96c1418..8df9039 100644 --- a/rabbitmq-server/src/mirrored_supervisor.erl +++ b/rabbitmq-server/deps/rabbit_common/src/mirrored_supervisor.erl @@ -347,7 +347,7 @@ handle_cast({ensure_monitoring, Pid}, State) -> {noreply, State}; handle_cast({die, Reason}, State = #state{group = Group}) -> - tell_all_peers_to_die(Group, Reason), + _ = tell_all_peers_to_die(Group, Reason), {stop, Reason, State}; handle_cast(Msg, State) -> @@ -364,7 +364,7 @@ handle_info({'DOWN', _Ref, process, Pid, Reason}, %% %% Therefore if we get here we know we need to cause the entire %% mirrored sup to shut down, not just fail over. 
- tell_all_peers_to_die(Group, Reason), + _ = tell_all_peers_to_die(Group, Reason), {stop, Reason, State}; handle_info({'DOWN', _Ref, process, Pid, _Reason}, @@ -411,14 +411,14 @@ maybe_start(Group, TxFun, Overall, Delegate, ChildSpec) -> check_start(Group, Overall, Delegate, ChildSpec) -> case mnesia:wread({?TABLE, {Group, id(ChildSpec)}}) of - [] -> write(Group, Overall, ChildSpec), + [] -> _ = write(Group, Overall, ChildSpec), start; [S] -> #mirrored_sup_childspec{key = {Group, Id}, mirroring_pid = Pid} = S, case Overall of Pid -> child(Delegate, Id); _ -> case supervisor(Pid) of - dead -> write(Group, Overall, ChildSpec), + dead -> _ = write(Group, Overall, ChildSpec), start; Delegate0 -> child(Delegate0, Id) end diff --git a/rabbitmq-server/src/mochijson2.erl b/rabbitmq-server/deps/rabbit_common/src/mochijson2.erl similarity index 100% rename from rabbitmq-server/src/mochijson2.erl rename to rabbitmq-server/deps/rabbit_common/src/mochijson2.erl diff --git a/rabbitmq-server/src/pmon.erl b/rabbitmq-server/deps/rabbit_common/src/pmon.erl similarity index 99% rename from rabbitmq-server/src/pmon.erl rename to rabbitmq-server/deps/rabbit_common/src/pmon.erl index f425300..cdfdc1c 100644 --- a/rabbitmq-server/src/pmon.erl +++ b/rabbitmq-server/deps/rabbit_common/src/pmon.erl @@ -84,7 +84,7 @@ demonitor(Item, S = #state{dict = M, module = Module}) -> case dict:find(Item, M) of {ok, MRef} -> Module:demonitor(MRef), S#state{dict = dict:erase(Item, M)}; - error -> M + error -> S end. is_monitored(Item, #state{dict = M}) -> dict:is_key(Item, M). 
diff --git a/rabbitmq-server/src/priority_queue.erl b/rabbitmq-server/deps/rabbit_common/src/priority_queue.erl similarity index 99% rename from rabbitmq-server/src/priority_queue.erl rename to rabbitmq-server/deps/rabbit_common/src/priority_queue.erl index 88c6951..0eab49b 100644 --- a/rabbitmq-server/src/priority_queue.erl +++ b/rabbitmq-server/deps/rabbit_common/src/priority_queue.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% %% Priority queues have essentially the same interface as ordinary diff --git a/rabbitmq-server/src/rabbit_amqqueue.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_amqqueue.erl similarity index 90% rename from rabbitmq-server/src/rabbit_amqqueue.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_amqqueue.erl index f6cc0fb..c4975b5 100644 --- a/rabbitmq-server/src/rabbit_amqqueue.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_amqqueue.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqqueue). @@ -24,13 +24,14 @@ assert_equivalence/5, check_exclusive_access/2, with_exclusive_access_or_die/3, stat/1, deliver/2, requeue/3, ack/3, reject/4]). --export([list/0, list/1, info_keys/0, info/1, info/2, info_all/1, info_all/2]). +-export([list/0, list/1, info_keys/0, info/1, info/2, info_all/1, info_all/2, + info_all/4]). -export([list_down/1]). -export([force_event_refresh/1, notify_policy_changed/1]). --export([consumers/1, consumers_all/1, consumer_info_keys/0]). +-export([consumers/1, consumers_all/1, consumers_all/3, consumer_info_keys/0]). 
-export([basic_get/4, basic_consume/10, basic_cancel/4, notify_decorators/1]). -export([notify_sent/2, notify_sent_queue_down/1, resume/2]). --export([notify_down_all/2, activate_limit_all/2, credit/5]). +-export([notify_down_all/2, notify_down_all/3, activate_limit_all/2, credit/5]). -export([on_node_up/1, on_node_down/1]). -export([update/2, store_queue/1, update_decorators/1, policy_changed/2]). -export([start_mirroring/1, stop_mirroring/1, sync_mirrors/1, @@ -43,7 +44,8 @@ -include("rabbit.hrl"). -include_lib("stdlib/include/qlc.hrl"). --define(INTEGER_ARG_TYPES, [byte, short, signedint, long]). +-define(INTEGER_ARG_TYPES, [byte, short, signedint, long, + unsignedbyte, unsignedshort, unsignedint]). -define(MORE_CONSUMER_CREDIT_AFTER, 50). @@ -118,6 +120,8 @@ -spec(info_all/1 :: (rabbit_types:vhost()) -> [rabbit_types:infos()]). -spec(info_all/2 :: (rabbit_types:vhost(), rabbit_types:info_keys()) -> [rabbit_types:infos()]). +-spec(info_all/4 :: (rabbit_types:vhost(), rabbit_types:info_keys(), + reference(), pid()) -> 'ok'). -spec(force_event_refresh/1 :: (reference()) -> 'ok'). -spec(notify_policy_changed/1 :: (rabbit_types:amqqueue()) -> 'ok'). -spec(consumers/1 :: (rabbit_types:amqqueue()) @@ -128,6 +132,9 @@ (rabbit_types:vhost()) -> [{name(), pid(), rabbit_types:ctag(), boolean(), non_neg_integer(), rabbit_framing:amqp_table()}]). +-spec(consumers_all/3 :: + (rabbit_types:vhost(), reference(), pid()) + -> 'ok'). -spec(stat/1 :: (rabbit_types:amqqueue()) -> {'ok', non_neg_integer(), non_neg_integer()}). @@ -153,6 +160,8 @@ -spec(ack/3 :: (pid(), [msg_id()], pid()) -> 'ok'). -spec(reject/4 :: (pid(), [msg_id()], boolean(), pid()) -> 'ok'). -spec(notify_down_all/2 :: (qpids(), pid()) -> ok_or_errors()). +-spec(notify_down_all/3 :: (qpids(), pid(), non_neg_integer()) + -> ok_or_errors()). -spec(activate_limit_all/2 :: (qpids(), pid()) -> ok_or_errors()). 
-spec(basic_get/4 :: (rabbit_types:amqqueue(), pid(), boolean(), pid()) -> {'ok', non_neg_integer(), qmsg()} | 'empty'). @@ -205,6 +214,19 @@ recover() -> %% faster than other nodes handled DOWN messages from us. on_node_down(node()), DurableQueues = find_durable_queues(), + L = length(DurableQueues), + + %% if there are not enough file handles, the server might hang + %% when trying to recover queues, warn the user: + case file_handle_cache:get_limit() < L of + true -> + rabbit_log:warning( + "Recovering ~p queues, available file handles: ~p. Please increase max open file handles limit to at least ~p!~n", + [L, file_handle_cache:get_limit(), L]); + false -> + ok + end, + {ok, BQ} = application:get_env(rabbit, backing_queue_module), %% We rely on BQ:start/1 returning the recovery terms in the same @@ -274,9 +296,15 @@ declare(QueueName, Durable, AutoDelete, Args, Owner, Node) -> recoverable_slaves = [], gm_pids = [], state = live})), - Node = rabbit_mirror_queue_misc:initial_queue_node(Q, Node), + + Node1 = case rabbit_queue_master_location_misc:get_location(Q) of + {ok, Node0} -> Node0; + {error, _} -> Node + end, + + Node1 = rabbit_mirror_queue_misc:initial_queue_node(Q, Node1), gen_server2:call( - rabbit_amqqueue_sup_sup:start_queue_process(Node, Q, declare), + rabbit_amqqueue_sup_sup:start_queue_process(Node1, Q, declare), {init, new}, infinity). internal_declare(Q, true) -> @@ -385,7 +413,14 @@ not_found_or_absent_dirty(Name) -> end. with(Name, F, E) -> + with(Name, F, E, 2000). + +with(Name, F, E, RetriesLeft) -> case lookup(Name) of + {ok, Q = #amqqueue{}} when RetriesLeft =:= 0 -> + %% Something bad happened to that queue, we are bailing out + %% on processing current request. + E({absent, Q, timeout}); {ok, Q = #amqqueue{state = crashed}} -> E({absent, Q, crashed}); {ok, Q = #amqqueue{pid = QPid}} -> @@ -397,8 +432,8 @@ with(Name, F, E) -> %% the retry loop. 
rabbit_misc:with_exit_handler( fun () -> false = rabbit_mnesia:is_process_alive(QPid), - timer:sleep(25), - with(Name, F, E) + timer:sleep(30), + with(Name, F, E, RetriesLeft - 1) end, fun () -> F(Q) end); {error, not_found} -> E(not_found_or_absent_dirty(Name)) @@ -468,7 +503,8 @@ declare_args() -> {<<"x-dead-letter-routing-key">>, fun check_dlxrk_arg/2}, {<<"x-max-length">>, fun check_non_neg_int_arg/2}, {<<"x-max-length-bytes">>, fun check_non_neg_int_arg/2}, - {<<"x-max-priority">>, fun check_non_neg_int_arg/2}]. + {<<"x-max-priority">>, fun check_non_neg_int_arg/2}, + {<<"x-queue-mode">>, fun check_queue_mode/2}]. consume_args() -> [{<<"x-priority">>, fun check_int_arg/2}, {<<"x-cancel-on-ha-failover">>, fun check_bool_arg/2}]. @@ -515,6 +551,14 @@ check_dlxrk_arg({longstr, _}, Args) -> check_dlxrk_arg({Type, _}, _Args) -> {error, {unacceptable_type, Type}}. +check_queue_mode({longstr, Val}, _Args) -> + case lists:member(Val, [<<"default">>, <<"lazy">>]) of + true -> ok; + false -> {error, invalid_queue_mode} + end; +check_queue_mode({Type, _}, _Args) -> + {error, {unacceptable_type, Type}}. + list() -> mnesia:dirty_match_object(rabbit_queue, #amqqueue{_ = '_'}). list(VHostPath) -> list(VHostPath, rabbit_queue). @@ -580,6 +624,14 @@ info_all(VHostPath, Items) -> map(list(VHostPath), fun (Q) -> info(Q, Items) end) ++ map(list_down(VHostPath), fun (Q) -> info_down(Q, Items, down) end). +info_all(VHostPath, Items, Ref, AggregatorPid) -> + rabbit_control_misc:emitting_map_with_exit_handler( + AggregatorPid, Ref, fun(Q) -> info(Q, Items) end, list(VHostPath), + continue), + rabbit_control_misc:emitting_map_with_exit_handler( + AggregatorPid, Ref, fun(Q) -> info_down(Q, Items) end, + list_down(VHostPath)). + force_event_refresh(Ref) -> [gen_server2:cast(Q#amqqueue.pid, {force_event_refresh, Ref}) || Q <- list()], @@ -593,15 +645,24 @@ consumers(#amqqueue{ pid = QPid }) -> delegate:call(QPid, consumers). consumer_info_keys() -> ?CONSUMER_INFO_KEYS. 
consumers_all(VHostPath) -> - ConsumerInfoKeys=consumer_info_keys(), + ConsumerInfoKeys = consumer_info_keys(), lists:append( map(list(VHostPath), - fun (Q) -> - [lists:zip( - ConsumerInfoKeys, - [Q#amqqueue.name, ChPid, CTag, AckRequired, Prefetch, Args]) || - {ChPid, CTag, AckRequired, Prefetch, Args} <- consumers(Q)] - end)). + fun(Q) -> get_queue_consumer_info(Q, ConsumerInfoKeys) end)). + +consumers_all(VHostPath, Ref, AggregatorPid) -> + ConsumerInfoKeys = consumer_info_keys(), + rabbit_control_misc:emitting_map( + AggregatorPid, Ref, + fun(Q) -> get_queue_consumer_info(Q, ConsumerInfoKeys) end, + list(VHostPath)). + +get_queue_consumer_info(Q, ConsumerInfoKeys) -> + lists:flatten( + [lists:zip(ConsumerInfoKeys, + [Q#amqqueue.name, ChPid, CTag, + AckRequired, Prefetch, Args]) || + {ChPid, CTag, AckRequired, Prefetch, Args} <- consumers(Q)]). stat(#amqqueue{pid = QPid}) -> delegate:call(QPid, stat). @@ -630,13 +691,23 @@ reject(QPid, Requeue, MsgIds, ChPid) -> delegate:cast(QPid, {reject, Requeue, MsgIds, ChPid}). notify_down_all(QPids, ChPid) -> - {_, Bads} = delegate:call(QPids, {notify_down, ChPid}), - case lists:filter( - fun ({_Pid, {exit, {R, _}, _}}) -> rabbit_misc:is_abnormal_exit(R); - ({_Pid, _}) -> false - end, Bads) of - [] -> ok; - Bads1 -> {error, Bads1} + notify_down_all(QPids, ChPid, ?CHANNEL_OPERATION_TIMEOUT). + +notify_down_all(QPids, ChPid, Timeout) -> + case rpc:call(node(), delegate, call, + [QPids, {notify_down, ChPid}], Timeout) of + {badrpc, timeout} -> {error, {channel_operation_timeout, Timeout}}; + {badrpc, Reason} -> {error, Reason}; + {_, Bads} -> + case lists:filter( + fun ({_Pid, {exit, {R, _}, _}}) -> + rabbit_misc:is_abnormal_exit(R); + ({_Pid, _}) -> false + end, Bads) of + [] -> ok; + Bads1 -> {error, Bads1} + end; + Error -> {error, Error} end. 
activate_limit_all(QPids, ChPid) -> diff --git a/rabbitmq-server/src/rabbit_auth_mechanism.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_auth_mechanism.erl similarity index 96% rename from rabbitmq-server/src/rabbit_auth_mechanism.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_auth_mechanism.erl index 78e3e7d..c50a429 100644 --- a/rabbitmq-server/src/rabbit_auth_mechanism.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_auth_mechanism.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_auth_mechanism). diff --git a/rabbitmq-server/src/rabbit_authn_backend.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_authn_backend.erl similarity index 95% rename from rabbitmq-server/src/rabbit_authn_backend.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_authn_backend.erl index b9cb0d3..47404df 100644 --- a/rabbitmq-server/src/rabbit_authn_backend.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_authn_backend.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_authn_backend). diff --git a/rabbitmq-server/src/rabbit_authz_backend.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_authz_backend.erl similarity index 97% rename from rabbitmq-server/src/rabbit_authz_backend.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_authz_backend.erl index 495a796..331de8f 100644 --- a/rabbitmq-server/src/rabbit_authz_backend.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_authz_backend.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. 
%% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_authz_backend). diff --git a/rabbitmq-server/src/rabbit_backing_queue.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_backing_queue.erl similarity index 90% rename from rabbitmq-server/src/rabbit_backing_queue.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_backing_queue.erl index d6cd3ca..862dbc9 100644 --- a/rabbitmq-server/src/rabbit_backing_queue.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_backing_queue.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_backing_queue). @@ -22,7 +22,7 @@ messages_unacknowledged_ram, messages_persistent, message_bytes, message_bytes_ready, message_bytes_unacknowledged, message_bytes_ram, - message_bytes_persistent, + message_bytes_persistent, head_message_timestamp, disk_reads, disk_writes, backing_queue_status]). -ifdef(use_specs). @@ -33,6 +33,10 @@ -type(flow() :: 'flow' | 'noflow'). -type(msg_ids() :: [rabbit_types:msg_id()]). +-type(publish() :: {rabbit_types:basic_message(), + rabbit_types:message_properties(), boolean()}). +-type(delivered_publish() :: {rabbit_types:basic_message(), + rabbit_types:message_properties()}). -type(fetch_result(Ack) :: ('empty' | {rabbit_types:basic_message(), boolean(), Ack})). -type(drop_result(Ack) :: @@ -47,6 +51,8 @@ -type(msg_fun(A) :: fun ((rabbit_types:basic_message(), ack(), A) -> A)). -type(msg_pred() :: fun ((rabbit_types:message_properties()) -> boolean())). +-type(queue_mode() :: atom()). + -spec(info_keys/0 :: () -> rabbit_types:info_keys()). 
%% Called on startup with a list of durable queue names. The queues @@ -104,6 +110,9 @@ rabbit_types:message_properties(), boolean(), pid(), flow(), state()) -> state(). +%% Like publish/6 but for batches of publishes. +-callback batch_publish([publish()], pid(), flow(), state()) -> state(). + %% Called for messages which have already been passed straight %% out to a client. The queue will be empty for these calls %% (i.e. saves the round trip through the backing queue). @@ -112,6 +121,11 @@ state()) -> {ack(), state()}. +%% Like publish_delivered/5 but for batches of publishes. +-callback batch_publish_delivered([delivered_publish()], pid(), flow(), + state()) + -> {[ack()], state()}. + %% Called to inform the BQ about messages which have reached the %% queue, but are not going to be further passed to BQ. -callback discard(rabbit_types:msg_id(), pid(), flow(), state()) -> state(). @@ -246,6 +260,12 @@ -callback is_duplicate(rabbit_types:basic_message(), state()) -> {boolean(), state()}. +-callback set_queue_mode(queue_mode(), state()) -> state(). + +-callback zip_msgs_and_acks(delivered_publish(), + [ack()], Acc, state()) + -> Acc. + -else. -export([behaviour_info/1]). 
@@ -253,14 +273,16 @@ behaviour_info(callbacks) -> [{start, 1}, {stop, 0}, {init, 3}, {terminate, 2}, {delete_and_terminate, 2}, {delete_crashed, 1}, {purge, 1}, - {purge_acks, 1}, {publish, 6}, - {publish_delivered, 5}, {discard, 4}, {drain_confirmed, 1}, + {purge_acks, 1}, {publish, 6}, {publish_delivered, 5}, + {batch_publish, 4}, {batch_publish_delivered, 4}, + {discard, 4}, {drain_confirmed, 1}, {dropwhile, 2}, {fetchwhile, 4}, {fetch, 2}, {drop, 2}, {ack, 2}, {requeue, 2}, {ackfold, 4}, {fold, 3}, {len, 1}, {is_empty, 1}, {depth, 1}, {set_ram_duration_target, 2}, {ram_duration, 1}, {needs_timeout, 1}, {timeout, 1}, {handle_pre_hibernate, 1}, {resume, 1}, {msg_rates, 1}, - {info, 2}, {invoke, 3}, {is_duplicate, 2}] ; + {info, 2}, {invoke, 3}, {is_duplicate, 2}, {set_queue_mode, 2}, + {zip_msgs_and_acks, 4}]; behaviour_info(_Other) -> undefined. diff --git a/rabbitmq-server/src/rabbit_basic.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_basic.erl similarity index 97% rename from rabbitmq-server/src/rabbit_basic.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_basic.erl index efc5ce2..ed71d8b 100644 --- a/rabbitmq-server/src/rabbit_basic.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_basic.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_basic). @@ -20,8 +20,8 @@ -export([publish/4, publish/5, publish/1, message/3, message/4, properties/1, prepend_table_header/3, - extract_headers/1, map_headers/2, delivery/4, header_routes/1, - parse_expiration/1, header/2, header/3]). + extract_headers/1, extract_timestamp/1, map_headers/2, delivery/4, + header_routes/1, parse_expiration/1, header/2, header/3]). -export([build_content/2, from_content/1, msg_size/1, maybe_gc_large_msg/1]). 
%%---------------------------------------------------------------------------- @@ -249,6 +249,11 @@ extract_headers(Content) -> rabbit_binary_parser:ensure_content_decoded(Content), Headers. +extract_timestamp(Content) -> + #content{properties = #'P_basic'{timestamp = Timestamp}} = + rabbit_binary_parser:ensure_content_decoded(Content), + Timestamp. + map_headers(F, Content) -> Content1 = rabbit_binary_parser:ensure_content_decoded(Content), #content{properties = #'P_basic'{headers = Headers} = Props} = Content1, diff --git a/rabbitmq-server/src/rabbit_binary_generator.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_binary_generator.erl similarity index 89% rename from rabbitmq-server/src/rabbit_binary_generator.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_binary_generator.erl index 34f2d60..a2a80d7 100644 --- a/rabbitmq-server/src/rabbit_binary_generator.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_binary_generator.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_binary_generator). @@ -119,20 +119,23 @@ create_frame(TypeInt, ChannelInt, Payload) -> table_field_to_binary({FName, T, V}) -> [short_string_to_binary(FName) | field_value_to_binary(T, V)]. 
-field_value_to_binary(longstr, V) -> [$S | long_string_to_binary(V)]; -field_value_to_binary(signedint, V) -> [$I, <>]; -field_value_to_binary(decimal, V) -> {Before, After} = V, +field_value_to_binary(longstr, V) -> [$S | long_string_to_binary(V)]; +field_value_to_binary(signedint, V) -> [$I, <>]; +field_value_to_binary(decimal, V) -> {Before, After} = V, [$D, Before, <>]; -field_value_to_binary(timestamp, V) -> [$T, <>]; -field_value_to_binary(table, V) -> [$F | table_to_binary(V)]; -field_value_to_binary(array, V) -> [$A | array_to_binary(V)]; -field_value_to_binary(byte, V) -> [$b, <>]; -field_value_to_binary(double, V) -> [$d, <>]; -field_value_to_binary(float, V) -> [$f, <>]; -field_value_to_binary(long, V) -> [$l, <>]; -field_value_to_binary(short, V) -> [$s, <>]; -field_value_to_binary(bool, V) -> [$t, if V -> 1; true -> 0 end]; -field_value_to_binary(binary, V) -> [$x | long_string_to_binary(V)]; +field_value_to_binary(timestamp, V) -> [$T, <>]; +field_value_to_binary(table, V) -> [$F | table_to_binary(V)]; +field_value_to_binary(array, V) -> [$A | array_to_binary(V)]; +field_value_to_binary(byte, V) -> [$b, <>]; +field_value_to_binary(double, V) -> [$d, <>]; +field_value_to_binary(float, V) -> [$f, <>]; +field_value_to_binary(long, V) -> [$l, <>]; +field_value_to_binary(short, V) -> [$s, <>]; +field_value_to_binary(bool, V) -> [$t, if V -> 1; true -> 0 end]; +field_value_to_binary(binary, V) -> [$x | long_string_to_binary(V)]; +field_value_to_binary(unsignedbyte, V) -> [$B, <>]; +field_value_to_binary(unsignedshort, V) -> [$u, <>]; +field_value_to_binary(unsignedint, V) -> [$i, <>]; field_value_to_binary(void, _V) -> [$V]. 
table_to_binary(Table) when is_list(Table) -> diff --git a/rabbitmq-server/src/rabbit_binary_parser.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_binary_parser.erl similarity index 93% rename from rabbitmq-server/src/rabbit_binary_parser.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_binary_parser.erl index 8b3bf3e..db8aca9 100644 --- a/rabbitmq-server/src/rabbit_binary_parser.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_binary_parser.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_binary_parser). @@ -53,17 +53,25 @@ parse_table(<>) -> [{NameString, longstr, Value} | parse_table(Rest)]; -?SIMPLE_PARSE_TABLE($I, Value:32/signed, signedint); ?SIMPLE_PARSE_TABLE($T, Value:64/unsigned, timestamp); parse_table(<<>>) -> []; ?SIMPLE_PARSE_TABLE($b, Value:8/signed, byte); +?SIMPLE_PARSE_TABLE($B, Value:8/unsigned, unsignedbyte); + +?SIMPLE_PARSE_TABLE($s, Value:16/signed, short); +?SIMPLE_PARSE_TABLE($u, Value:16/unsigned, unsignedshort); + +?SIMPLE_PARSE_TABLE($I, Value:32/signed, signedint); +?SIMPLE_PARSE_TABLE($i, Value:32/unsigned, unsignedint); + ?SIMPLE_PARSE_TABLE($d, Value:64/float, double); ?SIMPLE_PARSE_TABLE($f, Value:32/float, float); + ?SIMPLE_PARSE_TABLE($l, Value:64/signed, long); -?SIMPLE_PARSE_TABLE($s, Value:16/signed, short); + parse_table(<>) -> @@ -96,17 +104,26 @@ parse_table(<>) -> [{longstr, Value} | parse_array(Rest)]; -?SIMPLE_PARSE_ARRAY($I, Value:32/signed, signedint); ?SIMPLE_PARSE_ARRAY($T, Value:64/unsigned, timestamp); parse_array(<<>>) -> []; ?SIMPLE_PARSE_ARRAY($b, Value:8/signed, byte); +?SIMPLE_PARSE_ARRAY($B, Value:8/unsigned, unsignedbyte); + +?SIMPLE_PARSE_ARRAY($s, Value:16/signed, short); +?SIMPLE_PARSE_ARRAY($u, Value:16/unsigned, unsignedshort); + 
+?SIMPLE_PARSE_ARRAY($I, Value:32/signed, signedint); +?SIMPLE_PARSE_ARRAY($i, Value:32/unsigned, unsignedint); + ?SIMPLE_PARSE_ARRAY($d, Value:64/float, double); ?SIMPLE_PARSE_ARRAY($f, Value:32/float, float); + ?SIMPLE_PARSE_ARRAY($l, Value:64/signed, long); -?SIMPLE_PARSE_ARRAY($s, Value:16/signed, short); + + parse_array(<<$t, Value:8/unsigned, Rest/binary>>) -> [{bool, (Value /= 0)} | parse_array(Rest)]; diff --git a/rabbitmq-server/src/rabbit_channel.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_channel.erl similarity index 93% rename from rabbitmq-server/src/rabbit_channel.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_channel.erl index b23a841..13520d9 100644 --- a/rabbitmq-server/src/rabbit_channel.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_channel.erl @@ -11,10 +11,42 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_channel). + +%% rabbit_channel processes represent an AMQP 0-9-1 channels. +%% +%% Connections parse protocol frames coming from clients and +%% dispatch them to channel processes. +%% Channels are responsible for implementing the logic behind +%% the various protocol methods, involving other processes as +%% needed: +%% +%% * Routing messages (using functions in various exchange type +%% modules) to queue processes. +%% * Managing queues, exchanges, and bindings. 
+%% * Keeping track of consumers +%% * Keeping track of unacknowledged deliveries to consumers +%% * Keeping track of publisher confirms +%% * Keeping track of mandatory message routing confirmations +%% and returns +%% * Transaction management +%% * Authorisation (enforcing permissions) +%% * Publishing trace events if tracing is enabled +%% +%% Every channel has a number of dependent processes: +%% +%% * A writer which is responsible for sending frames to clients. +%% * A limiter which controls how many messages can be delivered +%% to consumers according to active QoS prefetch and internal +%% flow control logic. +%% +%% Channels are also aware of their connection's queue collector. +%% When a queue is declared as exclusive on a channel, the channel +%% will notify queue collector of that queue. + -include("rabbit_framing.hrl"). -include("rabbit.hrl"). @@ -23,7 +55,8 @@ -export([start_link/11, do/2, do/3, do_flow/3, flush/1, shutdown/1]). -export([send_command/2, deliver/4, deliver_reply/2, send_credit_reply/2, send_drained/2]). --export([list/0, info_keys/0, info/1, info/2, info_all/0, info_all/1]). +-export([list/0, info_keys/0, info/1, info/2, info_all/0, info_all/1, + info_all/3]). -export([refresh_config_local/0, ready_for_close/1]). -export([force_event_refresh/1]). @@ -32,17 +65,87 @@ prioritise_cast/3, prioritise_info/3, format_message_queue/2]). %% Internal -export([list_local/0, deliver_reply_local/3]). +-export([get_vhost/1, get_user/1]). + +-record(ch, { + %% starting | running | flow | closing + state, + %% same as reader's protocol. Used when instantiating + %% (protocol) exceptions. 
+ protocol, + %% channel number + channel, + %% reader process + reader_pid, + %% writer process + writer_pid, + %% + conn_pid, + %% same as reader's name, see #v1.name + %% in rabbit_reader + conn_name, + %% limiter pid, see rabbit_limiter + limiter, + %% none | {Msgs, Acks} | committing | failed | + tx, + %% (consumer) delivery tag sequence + next_tag, + %% messages pending consumer acknowledgement + unacked_message_q, + %% same as #v1.user in the reader, used in + %% authorisation checks + user, + %% same as #v1.user in the reader + virtual_host, + %% when queue.bind's queue field is empty, + %% this name will be used instead + most_recently_declared_queue, + %% a dictionary of queue pid to queue name + queue_names, + %% queue processes are monitored to update + %% queue names + queue_monitors, + %% a dictionary of consumer tags to + %% consumer details: #amqqueue record, acknowledgement mode, + %% consumer exclusivity, etc + consumer_mapping, + %% a dictionary of queue pids to consumer tag lists + queue_consumers, + %% a set of pids of queues that have unacknowledged + %% deliveries + delivering_queues, + %% when a queue is declared as exclusive, queue + %% collector must be notified. + %% see rabbit_queue_collector for more info. + queue_collector_pid, + %% timer used to emit statistics + stats_timer, + %% are publisher confirms enabled for this channel? + confirm_enabled, + %% publisher confirm delivery tag sequence + publish_seqno, + %% a dtree used to track unconfirmed + %% (to publishers) messages + unconfirmed, + %% a list of tags for published messages that were + %% delivered but are yet to be confirmed to the client + confirmed, + %% a dtree used to track oustanding notifications + %% for messages published as mandatory + mandatory, + %% same as capabilities in the reader + capabilities, + %% tracing exchange resource if tracing is enabled, + %% 'none' otherwise + trace_state, + consumer_prefetch, + %% used by "one shot RPC" (amq. 
+ reply_consumer, + %% flow | noflow, see rabbitmq-server#114 + delivery_flow, + interceptor_state +}). --record(ch, {state, protocol, channel, reader_pid, writer_pid, conn_pid, - conn_name, limiter, tx, next_tag, unacked_message_q, user, - virtual_host, most_recently_declared_queue, - queue_names, queue_monitors, consumer_mapping, - queue_consumers, delivering_queues, - queue_collector_pid, stats_timer, confirm_enabled, publish_seqno, - unconfirmed, confirmed, mandatory, capabilities, trace_state, - consumer_prefetch, reply_consumer, - %% flow | noflow, see rabbitmq-server#114 - delivery_flow}). -define(MAX_PERMISSION_CACHE_SIZE, 12). @@ -83,6 +186,10 @@ -type(channel_number() :: non_neg_integer()). +-export_type([channel/0]). + +-type(channel() :: #ch{}). + -spec(start_link/11 :: (channel_number(), pid(), pid(), pid(), string(), rabbit_types:protocol(), rabbit_types:user(), rabbit_types:vhost(), @@ -112,6 +219,7 @@ -spec(info/2 :: (pid(), rabbit_types:info_keys()) -> rabbit_types:infos()). -spec(info_all/0 :: () -> [rabbit_types:infos()]). -spec(info_all/1 :: (rabbit_types:info_keys()) -> [rabbit_types:infos()]). +-spec(info_all/3 :: (rabbit_types:info_keys(), reference(), pid()) -> 'ok'). -spec(refresh_config_local/0 :: () -> 'ok'). -spec(ready_for_close/1 :: (pid()) -> 'ok'). -spec(force_event_refresh/1 :: (reference()) -> 'ok'). @@ -219,6 +327,10 @@ info_all() -> info_all(Items) -> rabbit_misc:filter_exit_map(fun (C) -> info(C, Items) end, list()). +info_all(Items, Ref, AggregatorPid) -> + rabbit_control_misc:emitting_map_with_exit_handler( + AggregatorPid, Ref, fun(C) -> info(C, Items) end, list()). 
+ refresh_config_local() -> rabbit_misc:upmap( fun (C) -> gen_server2:call(C, refresh_config, infinity) end, @@ -272,12 +384,16 @@ init([Channel, ReaderPid, WriterPid, ConnPid, ConnName, Protocol, User, VHost, trace_state = rabbit_trace:init(VHost), consumer_prefetch = 0, reply_consumer = none, - delivery_flow = Flow}, - State1 = rabbit_event:init_stats_timer(State, #ch.stats_timer), - rabbit_event:notify(channel_created, infos(?CREATION_EVENT_KEYS, State1)), - rabbit_event:if_enabled(State1, #ch.stats_timer, - fun() -> emit_stats(State1) end), - {ok, State1, hibernate, + delivery_flow = Flow, + interceptor_state = undefined}, + State1 = State#ch{ + interceptor_state = rabbit_channel_interceptor:init(State)}, + State2 = rabbit_event:init_stats_timer(State1, #ch.stats_timer), + rabbit_event:notify(channel_created, infos(?CREATION_EVENT_KEYS, State2)), + rabbit_event:if_enabled(State2, #ch.stats_timer, + fun() -> emit_stats(State2) end), + put(channel_operation_timeout, ?CHANNEL_OPERATION_TIMEOUT), + {ok, State2, hibernate, {backoff, ?HIBERNATE_AFTER_MIN, ?HIBERNATE_AFTER_MIN, ?DESIRED_HIBERNATE}}. prioritise_call(Msg, _From, _Len, _State) -> @@ -326,8 +442,8 @@ handle_call(_Request, _From, State) -> noreply(State). handle_cast({method, Method, Content, Flow}, - State = #ch{reader_pid = Reader, - virtual_host = VHost}) -> + State = #ch{reader_pid = Reader, + interceptor_state = IState}) -> case Flow of %% We are going to process a message from the rabbit_reader %% process, so here we ack it. 
In this case we are accessing @@ -335,9 +451,10 @@ handle_cast({method, Method, Content, Flow}, flow -> credit_flow:ack(Reader); noflow -> ok end, - try handle_method(rabbit_channel_interceptor:intercept_method( - expand_shortcuts(Method, State), VHost), - Content, State) of + + try handle_method(rabbit_channel_interceptor:intercept_in( + expand_shortcuts(Method, State), Content, IState), + State) of {reply, Reply, NewState} -> ok = send(Reply, NewState), noreply(NewState); @@ -484,7 +601,10 @@ handle_pre_hibernate(State) -> ok = clear_permission_cache(), rabbit_event:if_enabled( State, #ch.stats_timer, - fun () -> emit_stats(State, [{idle_since, now()}]) end), + fun () -> emit_stats(State, + [{idle_since, + time_compat:os_system_time(milli_seconds)}]) + end), {hibernate, rabbit_event:stop_stats_timer(State, #ch.stats_timer)}. terminate(Reason, State) -> @@ -533,6 +653,11 @@ send(_Command, #ch{state = closing}) -> send(Command, #ch{writer_pid = WriterPid}) -> ok = rabbit_writer:send_command(WriterPid, Command). +format_soft_error(#amqp_error{name = N, explanation = E, method = M}) -> + io_lib:format("operation ~s caused a channel exception ~s: ~p", [M, N, E]); +format_soft_error(Reason) -> + Reason. 
+ handle_exception(Reason, State = #ch{protocol = Protocol, channel = Channel, writer_pid = WriterPid, @@ -546,9 +671,9 @@ handle_exception(Reason, State = #ch{protocol = Protocol, case rabbit_binary_generator:map_exception(Channel, Reason, Protocol) of {Channel, CloseMethod} -> log(error, "Channel error on connection ~p (~s, vhost: '~s'," - " user: '~s'), channel ~p:~n~p~n", - [ConnPid, ConnName, VHost, User#user.username, - Channel, Reason]), + " user: '~s'), channel ~p:~n~s~n", + [ConnPid, ConnName, VHost, User#user.username, + Channel, format_soft_error(Reason)]), ok = rabbit_writer:send_command(WriterPid, CloseMethod), {noreply, State1}; {0, _} -> @@ -730,6 +855,9 @@ record_confirms([], State) -> record_confirms(MXs, State = #ch{confirmed = C}) -> State#ch{confirmed = [MXs | C]}. +handle_method({Method, Content}, State) -> + handle_method(Method, Content, State). + handle_method(#'channel.open'{}, _, State = #ch{state = starting}) -> %% Don't leave "starting" as the state for 5s. TODO is this TRTTD? State1 = State#ch{state = running}, @@ -739,7 +867,7 @@ handle_method(#'channel.open'{}, _, State = #ch{state = starting}) -> handle_method(#'channel.open'{}, _, _State) -> rabbit_misc:protocol_error( - command_invalid, "second 'channel.open' seen", []); + channel_error, "second 'channel.open' seen", []); handle_method(_Method, _, #ch{state = starting}) -> rabbit_misc:protocol_error(channel_error, "expected 'channel.open'", []); @@ -756,7 +884,7 @@ handle_method(_Method, _, State = #ch{state = closing}) -> {noreply, State}; handle_method(#'channel.close'{}, _, State = #ch{reader_pid = ReaderPid}) -> - {ok, State1} = notify_queues(State), + {_Result, State1} = notify_queues(State), %% We issue the channel.close_ok response after a handshake with %% the reader, the other half of which is ready_for_close. 
That %% way the reader forgets about the channel before we send the @@ -1643,7 +1771,9 @@ notify_queues(State = #ch{consumer_mapping = Consumers, delivering_queues = DQ }) -> QPids = sets:to_list( sets:union(sets:from_list(consumer_queues(Consumers)), DQ)), - {rabbit_amqqueue:notify_down_all(QPids, self()), State#ch{state = closing}}. + {rabbit_amqqueue:notify_down_all(QPids, self(), + get(channel_operation_timeout)), + State#ch{state = closing}}. foreach_per_queue(_F, []) -> ok; @@ -1847,7 +1977,7 @@ i(messages_uncommitted, #ch{tx = {Msgs, _Acks}}) -> queue:len(Msgs); i(messages_uncommitted, #ch{}) -> 0; i(acks_uncommitted, #ch{tx = {_Msgs, Acks}}) -> ack_len(Acks); i(acks_uncommitted, #ch{}) -> 0; -i(state, #ch{state = running}) -> credit_flow:state(); +i(state, #ch{state = running}) -> credit_flow:state(); i(state, #ch{state = State}) -> State; i(prefetch_count, #ch{consumer_prefetch = C}) -> C; i(global_prefetch_count, #ch{limiter = Limiter}) -> @@ -1895,3 +2025,7 @@ erase_queue_stats(QName) -> [erase({queue_exchange_stats, QX}) || {{queue_exchange_stats, QX = {QName0, _}}, _} <- get(), QName0 =:= QName]. + +get_vhost(#ch{virtual_host = VHost}) -> VHost. + +get_user(#ch{user = User}) -> User. diff --git a/rabbitmq-server/deps/rabbit_common/src/rabbit_channel_interceptor.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_channel_interceptor.erl new file mode 100644 index 0000000..9793459 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_channel_interceptor.erl @@ -0,0 +1,117 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. 
See +%% the License for the specific language governing rights and +%% limitations under the License. +%% +%% The Original Code is RabbitMQ. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_channel_interceptor). + +-include("rabbit_framing.hrl"). +-include("rabbit.hrl"). + +-export([init/1, intercept_in/3]). + +-ifdef(use_specs). + +-type(method_name() :: rabbit_framing:amqp_method_name()). +-type(original_method() :: rabbit_framing:amqp_method_record()). +-type(processed_method() :: rabbit_framing:amqp_method_record()). +-type(original_content() :: rabbit_types:maybe(rabbit_types:content())). +-type(processed_content() :: rabbit_types:maybe(rabbit_types:content())). +-type(interceptor_state() :: term()). + +-callback description() -> [proplists:property()]. +%% Derive some initial state from the channel. This will be passed back +%% as the third argument of intercept/3. +-callback init(rabbit_channel:channel()) -> interceptor_state(). +-callback intercept(original_method(), original_content(), + interceptor_state()) -> + {processed_method(), processed_content()} | + rabbit_misc:channel_or_connection_exit(). +-callback applies_to() -> list(method_name()). + +-else. + +-export([behaviour_info/1]). + +behaviour_info(callbacks) -> + [{description, 0}, {init, 1}, {intercept, 3}, {applies_to, 0}]; +behaviour_info(_Other) -> + undefined. + +-endif. + +init(Ch) -> + Mods = [M || {_, M} <- rabbit_registry:lookup_all(channel_interceptor)], + check_no_overlap(Mods), + [{Mod, Mod:init(Ch)} || Mod <- Mods]. + +check_no_overlap(Mods) -> + check_no_overlap1([sets:from_list(Mod:applies_to()) || Mod <- Mods]). 
+ +%% Check no non-empty pairwise intersection in a list of sets +check_no_overlap1(Sets) -> + lists:foldl(fun(Set, Union) -> + Is = sets:intersection(Set, Union), + case sets:size(Is) of + 0 -> ok; + _ -> + internal_error("Interceptor: more than one " + "module handles ~p~n", [Is]) + end, + sets:union(Set, Union) + end, + sets:new(), + Sets), + ok. + +intercept_in(M, C, Mods) -> + lists:foldl(fun({Mod, ModState}, {M1, C1}) -> + call_module(Mod, ModState, M1, C1) + end, + {M, C}, + Mods). + +call_module(Mod, St, M, C) -> + % this little dance is because Mod might be unloaded at any point + case (catch {ok, Mod:intercept(M, C, St)}) of + {ok, R} -> validate_response(Mod, M, C, R); + {'EXIT', {undef, [{Mod, intercept, _, _} | _]}} -> {M, C} + end. + +validate_response(Mod, M1, C1, R = {M2, C2}) -> + case {validate_method(M1, M2), validate_content(C1, C2)} of + {true, true} -> R; + {false, _} -> + internal_error("Interceptor: ~p expected to return " + "method: ~p but returned: ~p", + [Mod, rabbit_misc:method_record_type(M1), + rabbit_misc:method_record_type(M2)]); + {_, false} -> + internal_error("Interceptor: ~p expected to return " + "content iff content is provided but " + "content in = ~p; content out = ~p", + [Mod, C1, C2]) + end. + +validate_method(M, M2) -> + rabbit_misc:method_record_type(M) =:= rabbit_misc:method_record_type(M2). + +validate_content(none, none) -> true; +validate_content(#content{}, #content{}) -> true; +validate_content(_, _) -> false. + +%% keep dialyzer happy +-spec internal_error(string(), [any()]) -> no_return(). +internal_error(Format, Args) -> + rabbit_misc:protocol_error(internal_error, Format, Args). 
diff --git a/rabbitmq-server/src/rabbit_command_assembler.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_command_assembler.erl similarity index 98% rename from rabbitmq-server/src/rabbit_command_assembler.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_command_assembler.erl index f93b85b..49601db 100644 --- a/rabbitmq-server/src/rabbit_command_assembler.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_command_assembler.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_command_assembler). diff --git a/rabbitmq-server/deps/rabbit_common/src/rabbit_common.app.src b/rabbitmq-server/deps/rabbit_common/src/rabbit_common.app.src new file mode 100644 index 0000000..758e314 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_common.app.src @@ -0,0 +1,13 @@ +% vim:ft=erlang: + +{application, rabbit_common, [ + {description, ""}, + {vsn, "3.6.1"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. diff --git a/rabbitmq-server/deps/rabbit_common/src/rabbit_control_misc.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_control_misc.erl new file mode 100644 index 0000000..2e274e8 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_control_misc.erl @@ -0,0 +1,96 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and +%% limitations under the License. 
+%% +%% The Original Code is RabbitMQ. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_control_misc). + +-export([emitting_map/4, emitting_map/5, emitting_map_with_exit_handler/4, + emitting_map_with_exit_handler/5, wait_for_info_messages/5, + print_cmd_result/2]). + +-ifdef(use_specs). + +-spec(emitting_map/4 :: (pid(), reference(), fun(), list()) -> 'ok'). +-spec(emitting_map/5 :: (pid(), reference(), fun(), list(), atom()) -> 'ok'). +-spec(emitting_map_with_exit_handler/4 :: + (pid(), reference(), fun(), list()) -> 'ok'). +-spec(emitting_map_with_exit_handler/5 :: + (pid(), reference(), fun(), list(), atom()) -> 'ok'). +-spec(print_cmd_result/2 :: (atom(), term()) -> 'ok'). + +-endif. + +emitting_map(AggregatorPid, Ref, Fun, List) -> + emitting_map(AggregatorPid, Ref, Fun, List, continue), + AggregatorPid ! {Ref, finished}, + ok. + +emitting_map(AggregatorPid, Ref, Fun, List, continue) -> + _ = emitting_map0(AggregatorPid, Ref, Fun, List, fun step/4), + ok. + +emitting_map_with_exit_handler(AggregatorPid, Ref, Fun, List) -> + emitting_map_with_exit_handler(AggregatorPid, Ref, Fun, List, continue), + AggregatorPid ! {Ref, finished}, + ok. + +emitting_map_with_exit_handler(AggregatorPid, Ref, Fun, List, continue) -> + _ = emitting_map0(AggregatorPid, Ref, Fun, List, fun step_with_exit_handler/4), + ok. + +emitting_map0(AggregatorPid, Ref, Fun, List, StepFun) -> + [StepFun(AggregatorPid, Ref, Fun, Item) || Item <- List]. + +step(AggregatorPid, Ref, Fun, Item) -> + AggregatorPid ! {Ref, Fun(Item), continue}, + ok. + +step_with_exit_handler(AggregatorPid, Ref, Fun, Item) -> + Noop = make_ref(), + case rabbit_misc:with_exit_handler( + fun () -> Noop end, + fun () -> Fun(Item) end) of + Noop -> + ok; + Res -> + AggregatorPid ! {Ref, Res, continue}, + ok + end. 
+ +wait_for_info_messages(Pid, Ref, ArgAtoms, DisplayFun, Timeout) -> + _ = notify_if_timeout(Pid, Ref, Timeout), + wait_for_info_messages(Ref, ArgAtoms, DisplayFun). + +wait_for_info_messages(Ref, InfoItemKeys, DisplayFun) when is_reference(Ref) -> + receive + {Ref, finished} -> + ok; + {Ref, {timeout, T}} -> + exit({error, {timeout, (T / 1000)}}); + {Ref, []} -> + wait_for_info_messages(Ref, InfoItemKeys, DisplayFun); + {Ref, Result, continue} -> + DisplayFun(Result, InfoItemKeys), + wait_for_info_messages(Ref, InfoItemKeys, DisplayFun); + {error, Error} -> + Error; + _ -> + wait_for_info_messages(Ref, InfoItemKeys, DisplayFun) + end. + +notify_if_timeout(Pid, Ref, Timeout) -> + timer:send_after(Timeout, Pid, {Ref, {timeout, Timeout}}). + +print_cmd_result(authenticate_user, _Result) -> io:format("Success~n"). diff --git a/rabbitmq-server/deps/rabbit_common/src/rabbit_data_coercion.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_data_coercion.erl new file mode 100644 index 0000000..22b0dbc --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_data_coercion.erl @@ -0,0 +1,22 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and +%% limitations under the License. +%% +%% The Original Code is RabbitMQ. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_data_coercion). + +-export([to_binary/1]). + +to_binary(Val) when is_list(Val) -> list_to_binary(Val); +to_binary(Val) -> Val. 
diff --git a/rabbitmq-server/src/rabbit_event.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_event.erl similarity index 96% rename from rabbitmq-server/src/rabbit_event.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_event.erl index 13bf6bc..a069973 100644 --- a/rabbitmq-server/src/rabbit_event.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_event.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_event). @@ -37,8 +37,7 @@ -type(event_type() :: atom()). -type(event_props() :: term()). --type(event_timestamp() :: - {non_neg_integer(), non_neg_integer(), non_neg_integer()}). +-type(event_timestamp() :: non_neg_integer()). -type(event() :: #event { type :: event_type(), props :: event_props(), @@ -160,5 +159,5 @@ event_cons(Type, Props, Ref) -> #event{type = Type, props = Props, reference = Ref, - timestamp = os:timestamp()}. + timestamp = time_compat:os_system_time(milli_seconds)}. diff --git a/rabbitmq-server/src/rabbit_exchange_decorator.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_exchange_decorator.erl similarity index 98% rename from rabbitmq-server/src/rabbit_exchange_decorator.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_exchange_decorator.erl index 7c5bfdf..bf57b2a 100644 --- a/rabbitmq-server/src/rabbit_exchange_decorator.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_exchange_decorator.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_exchange_decorator). 
diff --git a/rabbitmq-server/src/rabbit_exchange_type.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_exchange_type.erl similarity index 97% rename from rabbitmq-server/src/rabbit_exchange_type.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_exchange_type.erl index 92c1de6..0b7fda6 100644 --- a/rabbitmq-server/src/rabbit_exchange_type.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_exchange_type.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_exchange_type). diff --git a/rabbitmq-server/src/rabbit_heartbeat.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_heartbeat.erl similarity index 99% rename from rabbitmq-server/src/rabbit_heartbeat.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_heartbeat.erl index 9930767..fad9de2 100644 --- a/rabbitmq-server/src/rabbit_heartbeat.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_heartbeat.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_heartbeat). diff --git a/rabbitmq-server/src/rabbit_misc.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_misc.erl similarity index 93% rename from rabbitmq-server/src/rabbit_misc.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_misc.erl index ed5b38e..5267576 100644 --- a/rabbitmq-server/src/rabbit_misc.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_misc.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. 
+%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_misc). @@ -51,11 +51,10 @@ -export([dict_cons/3, orddict_cons/3, gb_trees_cons/3]). -export([gb_trees_fold/3, gb_trees_foreach/2]). -export([all_module_attributes/1, build_acyclic_graph/3]). --export([now_ms/0]). -export([const/1]). -export([ntoa/1, ntoab/1]). -export([is_process_alive/1]). --export([pget/2, pget/3, pget_or_die/2, pset/3]). +-export([pget/2, pget/3, pget_or_die/2, pmerge/3, pset/3, plmerge/2]). -export([format_message_queue/2]). -export([append_rpc_all_nodes/4]). -export([os_cmd/1]). @@ -66,13 +65,14 @@ -export([json_encode/1, json_decode/1, json_to_term/1, term_to_json/1]). -export([check_expiry/1]). -export([base64url/1]). --export([interval_operation/4]). +-export([interval_operation/5]). -export([ensure_timer/4, stop_timer/2, send_after/3, cancel_timer/1]). -export([get_parent/0]). -export([store_proc_name/1, store_proc_name/2]). -export([moving_average/4]). --export([now_to_ms/1]). -export([get_env/3]). +-export([get_channel_operation_timeout/0]). +-export([random/1]). %% Horrible macro to use in guards -define(IS_BENIGN_EXIT(R), @@ -217,12 +217,11 @@ (atom()) -> [{atom(), atom(), [term()]}]). -spec(build_acyclic_graph/3 :: (graph_vertex_fun(), graph_edge_fun(), [{atom(), [term()]}]) - -> rabbit_types:ok_or_error2(digraph:digraph(), + -> rabbit_types:ok_or_error2(digraph:graph(), {'vertex', 'duplicate', digraph:vertex()} | {'edge', ({bad_vertex, digraph:vertex()} | {bad_edge, [digraph:vertex()]}), digraph:vertex(), digraph:vertex()})). --spec(now_ms/0 :: () -> non_neg_integer()). -spec(const/1 :: (A) -> thunk(A)). -spec(ntoa/1 :: (inet:ip_address()) -> string()). -spec(ntoab/1 :: (inet:ip_address()) -> string()). @@ -230,7 +229,9 @@ -spec(pget/2 :: (term(), [term()]) -> term()). -spec(pget/3 :: (term(), [term()], term()) -> term()). -spec(pget_or_die/2 :: (term(), [term()]) -> term() | no_return()). --spec(pset/3 :: (term(), term(), [term()]) -> term()). 
+-spec(pmerge/3 :: (term(), term(), [term()]) -> [term()]). +-spec(plmerge/2 :: ([term()], [term()]) -> [term()]). +-spec(pset/3 :: (term(), term(), [term()]) -> [term()]). -spec(format_message_queue/2 :: (any(), priority_queue:q()) -> term()). -spec(append_rpc_all_nodes/4 :: ([node()], atom(), atom(), [any()]) -> [any()]). -spec(os_cmd/1 :: (string()) -> string()). @@ -247,8 +248,8 @@ -spec(term_to_json/1 :: (any()) -> any()). -spec(check_expiry/1 :: (integer()) -> rabbit_types:ok_or_error(any())). -spec(base64url/1 :: (binary()) -> string()). --spec(interval_operation/4 :: - ({atom(), atom(), any()}, float(), non_neg_integer(), non_neg_integer()) +-spec(interval_operation/5 :: + ({atom(), atom(), any()}, float(), non_neg_integer(), non_neg_integer(), non_neg_integer()) -> {any(), non_neg_integer()}). -spec(ensure_timer/4 :: (A, non_neg_integer(), non_neg_integer(), any()) -> A). -spec(stop_timer/2 :: (A, non_neg_integer()) -> A). @@ -259,10 +260,10 @@ -spec(store_proc_name/1 :: (rabbit_types:proc_type_and_name()) -> ok). -spec(moving_average/4 :: (float(), float(), float(), float() | 'undefined') -> float()). --spec(now_to_ms/1 :: ({non_neg_integer(), - non_neg_integer(), - non_neg_integer()}) -> pos_integer()). -spec(get_env/3 :: (atom(), atom(), term()) -> term()). +-spec(get_channel_operation_timeout/0 :: () -> non_neg_integer()). +-spec(random/1 :: (non_neg_integer()) -> non_neg_integer()). + -endif. %%---------------------------------------------------------------------------- @@ -310,16 +311,23 @@ absent(#amqqueue{name = QueueName, pid = QPid, durable = true}, nodedown) -> absent(#amqqueue{name = QueueName}, crashed) -> protocol_error(not_found, - "~s has crashed and failed to restart", [rs(QueueName)]). 
+ "~s has crashed and failed to restart", [rs(QueueName)]); -type_class(byte) -> int; -type_class(short) -> int; -type_class(signedint) -> int; -type_class(long) -> int; -type_class(decimal) -> int; -type_class(float) -> float; -type_class(double) -> float; -type_class(Other) -> Other. +absent(#amqqueue{name = QueueName}, timeout) -> + protocol_error(not_found, + "failed to perform operation on ~s due to timeout", [rs(QueueName)]). + +type_class(byte) -> int; +type_class(short) -> int; +type_class(signedint) -> int; +type_class(long) -> int; +type_class(decimal) -> int; +type_class(unsignedbyte) -> int; +type_class(unsignedshort) -> int; +type_class(unsignedint) -> int; +type_class(float) -> float; +type_class(double) -> float; +type_class(Other) -> Other. assert_args_equivalence(Orig, New, Name, Keys) -> [assert_args_equivalence1(Orig, New, Name, Key) || Key <- Keys], @@ -652,18 +660,7 @@ format_many(List) -> lists:flatten([io_lib:format(F ++ "~n", A) || {F, A} <- List]). format_stderr(Fmt, Args) -> - case os:type() of - {unix, _} -> - Port = open_port({fd, 0, 2}, [out]), - port_command(Port, io_lib:format(Fmt, Args)), - port_close(Port); - {win32, _} -> - %% stderr on Windows is buffered and I can't figure out a - %% way to trigger a fflush(stderr) in Erlang. So rather - %% than risk losing output we write to stdout instead, - %% which appears to be unbuffered. - io:format(Fmt, Args) - end, + io:format(standard_error, Fmt, Args), ok. unfold(Fun, Init) -> @@ -804,9 +801,6 @@ gb_trees_fold1(Fun, Acc, {Key, Val, It}) -> gb_trees_foreach(Fun, Tree) -> gb_trees_fold(fun (Key, Val, Acc) -> Fun(Key, Val), Acc end, ok, Tree). -now_ms() -> - timer:now_diff(now(), {0,0,0}) div 1000. - module_attributes(Module) -> case catch Module:module_info(attributes) of {'EXIT', {undef, [{Module, module_info, _} | _]}} -> @@ -882,8 +876,20 @@ is_process_alive(Pid) -> lists:member(Node, [node() | nodes()]) andalso rpc:call(Node, erlang, is_process_alive, [Pid]) =:= true. 
-pget(K, P) -> proplists:get_value(K, P). -pget(K, P, D) -> proplists:get_value(K, P, D). +pget(K, P) -> + case lists:keyfind(K, 1, P) of + {K, V} -> + V; + _ -> + undefined + end. +pget(K, P, D) -> + case lists:keyfind(K, 1, P) of + {K, V} -> + V; + _ -> + D + end. pget_or_die(K, P) -> case proplists:get_value(K, P) of @@ -891,6 +897,21 @@ pget_or_die(K, P) -> V -> V end. +%% property merge +pmerge(Key, Val, List) -> + case proplists:is_defined(Key, List) of + true -> List; + _ -> [{Key, Val} | List] + end. + +%% proplists merge +plmerge(P1, P2) -> + dict:to_list(dict:merge(fun(_, V, _) -> + V + end, + dict:from_list(P1), + dict:from_list(P2))). + pset(Key, Value, List) -> [{Key, Value} | proplists:delete(Key, List)]. format_message_queue(_Opt, MQ) -> @@ -1038,9 +1059,6 @@ term_to_json(V) when is_binary(V) orelse is_number(V) orelse V =:= null orelse V =:= true orelse V =:= false -> V. -now_to_ms({Mega, Sec, Micro}) -> - (Mega * 1000000 * 1000000 + Sec * 1000000 + Micro) div 1000. - check_expiry(N) when N < 0 -> {error, {value_negative, N}}; check_expiry(_N) -> ok. @@ -1055,12 +1073,13 @@ base64url(In) -> %% want it to take more than MaxRatio of IdealInterval. So if it takes %% more then you want to run it less often. So we time how long it %% takes to run, and then suggest how long you should wait before -%% running it again. Times are in millis. -interval_operation({M, F, A}, MaxRatio, IdealInterval, LastInterval) -> +%% running it again with a user specified max interval. Times are in millis. 
+interval_operation({M, F, A}, MaxRatio, MaxInterval, IdealInterval, LastInterval) -> {Micros, Res} = timer:tc(M, F, A), {Res, case {Micros > 1000 * (MaxRatio * IdealInterval), Micros > 1000 * (MaxRatio * LastInterval)} of - {true, true} -> round(LastInterval * 1.5); + {true, true} -> lists:min([MaxInterval, + round(LastInterval * 1.5)]); {true, false} -> LastInterval; {false, false} -> lists:max([IdealInterval, round(LastInterval / 1.5)]) @@ -1105,6 +1124,13 @@ get_env(Application, Key, Def) -> undefined -> Def end. +get_channel_operation_timeout() -> + %% Default channel_operation_timeout set to net_ticktime + 10s to + %% give allowance for any down messages to be received first, + %% whenever it is used for cross-node calls with timeouts. + Default = (net_kernel:get_net_ticktime() + 10) * 1000, + application:get_env(rabbit, channel_operation_timeout, Default). + moving_average(_Time, _HalfLife, Next, undefined) -> Next; %% We want the Weight to decrease as Time goes up (since Weight is the @@ -1124,6 +1150,16 @@ moving_average(Time, HalfLife, Next, Current) -> Weight = math:exp(Time * math:log(0.5) / HalfLife), Next * (1 - Weight) + Current * Weight. +random(N) -> + case get(random_seed) of + undefined -> + random:seed(erlang:phash2([node()]), + time_compat:monotonic_time(), + time_compat:unique_integer()); + _ -> ok + end, + random:uniform(N). + %% ------------------------------------------------------------------------- %% Begin copypasta from gen_server2.erl diff --git a/rabbitmq-server/src/rabbit_msg_store_index.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_msg_store_index.erl similarity index 96% rename from rabbitmq-server/src/rabbit_msg_store_index.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_msg_store_index.erl index 0c7a37b..b230ca8 100644 --- a/rabbitmq-server/src/rabbit_msg_store_index.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_msg_store_index.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. 
%% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_msg_store_index). diff --git a/rabbitmq-server/src/rabbit_net.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_net.erl similarity index 79% rename from rabbitmq-server/src/rabbit_net.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_net.erl index 1731d48..e77f2ab 100644 --- a/rabbitmq-server/src/rabbit_net.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_net.erl @@ -11,16 +11,26 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_net). -include("rabbit.hrl"). +-ifdef(define_tls_atom_version). +%% In Erlang R16B03, tls_atom_version() is defined in ssl_internal.hrl, +%% which is not included by ssl_api.hrl. Instead of including it here, +%% we redefine it to avoid too much pollution. +-type tls_atom_version() :: sslv3 | tlsv1 | 'tlsv1.1' | 'tlsv1.2'. +-endif. + +-include_lib("ssl/src/ssl_api.hrl"). + -export([is_ssl/1, ssl_info/1, controlling_process/2, getstat/2, recv/1, sync_recv/2, async_recv/3, port_command/2, getopts/2, setopts/2, send/2, close/1, fast_close/1, sockname/1, peername/1, - peercert/1, connection_string/2, socket_ends/2, is_loopback/1]). + peercert/1, connection_string/2, socket_ends/2, is_loopback/1, + accept_ack/2]). %%--------------------------------------------------------------------------- @@ -33,14 +43,14 @@ 'send_cnt' | 'send_max' | 'send_avg' | 'send_oct' | 'send_pend'). -type(ok_val_or_error(A) :: rabbit_types:ok_or_error2(A, any())). -type(ok_or_any_error() :: rabbit_types:ok_or_error(any())). --type(socket() :: port() | #ssl_socket{}). +-type(socket() :: port() | ssl:sslsocket()). 
-type(opts() :: [{atom(), any()} | {raw, non_neg_integer(), non_neg_integer(), binary()}]). -type(host_or_ip() :: binary() | inet:ip_address()). -spec(is_ssl/1 :: (socket()) -> boolean()). -spec(ssl_info/1 :: (socket()) -> 'nossl' | ok_val_or_error( - {atom(), {atom(), atom(), atom()}})). + [{atom(), any()}])). -spec(controlling_process/2 :: (socket(), pid()) -> ok_or_any_error()). -spec(getstat/2 :: (socket(), [stat_option()]) @@ -78,6 +88,7 @@ -> ok_val_or_error({host_or_ip(), rabbit_networking:ip_port(), host_or_ip(), rabbit_networking:ip_port()})). -spec(is_loopback/1 :: (socket() | inet:ip_address()) -> boolean()). +-spec(accept_ack/2 :: (any(), socket()) -> ok). -endif. @@ -85,27 +96,35 @@ -define(SSL_CLOSE_TIMEOUT, 5000). --define(IS_SSL(Sock), is_record(Sock, ssl_socket)). +-define(IS_SSL(Sock), is_record(Sock, sslsocket)). is_ssl(Sock) -> ?IS_SSL(Sock). +%% Seems hackish. Is hackish. But the structure is stable and +%% kept this way for backward compatibility reasons. We need +%% it for two reasons: there are no ssl:getstat(Sock) function, +%% and no ssl:close(Timeout) function. Both of them are being +%% worked on as we speak. +ssl_get_socket(Sock) -> + element(2, element(2, Sock)). + ssl_info(Sock) when ?IS_SSL(Sock) -> - ssl:connection_info(Sock#ssl_socket.ssl); + ssl_compat:connection_information(Sock); ssl_info(_Sock) -> nossl. controlling_process(Sock, Pid) when ?IS_SSL(Sock) -> - ssl:controlling_process(Sock#ssl_socket.ssl, Pid); + ssl:controlling_process(Sock, Pid); controlling_process(Sock, Pid) when is_port(Sock) -> gen_tcp:controlling_process(Sock, Pid). getstat(Sock, Stats) when ?IS_SSL(Sock) -> - inet:getstat(Sock#ssl_socket.tcp, Stats); + inet:getstat(ssl_get_socket(Sock), Stats); getstat(Sock, Stats) when is_port(Sock) -> inet:getstat(Sock, Stats). 
recv(Sock) when ?IS_SSL(Sock) -> - recv(Sock#ssl_socket.ssl, {ssl, ssl_closed, ssl_error}); + recv(Sock, {ssl, ssl_closed, ssl_error}); recv(Sock) when is_port(Sock) -> recv(Sock, {tcp, tcp_closed, tcp_error}). @@ -118,7 +137,7 @@ recv(S, {DataTag, ClosedTag, ErrorTag}) -> end. sync_recv(Sock, Length) when ?IS_SSL(Sock) -> - ssl:recv(Sock#ssl_socket.ssl, Length); + ssl:recv(Sock, Length); sync_recv(Sock, Length) -> gen_tcp:recv(Sock, Length). @@ -127,7 +146,7 @@ async_recv(Sock, Length, Timeout) when ?IS_SSL(Sock) -> Ref = make_ref(), spawn(fun () -> Pid ! {inet_async, Sock, Ref, - ssl:recv(Sock#ssl_socket.ssl, Length, Timeout)} + ssl:recv(Sock, Length, Timeout)} end), {ok, Ref}; @@ -137,7 +156,7 @@ async_recv(Sock, Length, Timeout) when is_port(Sock) -> prim_inet:async_recv(Sock, Length, Timeout). port_command(Sock, Data) when ?IS_SSL(Sock) -> - case ssl:send(Sock#ssl_socket.ssl, Data) of + case ssl:send(Sock, Data) of ok -> self() ! {inet_reply, Sock, ok}, true; {error, Reason} -> erlang:error(Reason) @@ -146,19 +165,19 @@ port_command(Sock, Data) when is_port(Sock) -> erlang:port_command(Sock, Data). getopts(Sock, Options) when ?IS_SSL(Sock) -> - ssl:getopts(Sock#ssl_socket.ssl, Options); + ssl:getopts(Sock, Options); getopts(Sock, Options) when is_port(Sock) -> inet:getopts(Sock, Options). setopts(Sock, Options) when ?IS_SSL(Sock) -> - ssl:setopts(Sock#ssl_socket.ssl, Options); + ssl:setopts(Sock, Options); setopts(Sock, Options) when is_port(Sock) -> inet:setopts(Sock, Options). -send(Sock, Data) when ?IS_SSL(Sock) -> ssl:send(Sock#ssl_socket.ssl, Data); +send(Sock, Data) when ?IS_SSL(Sock) -> ssl:send(Sock, Data); send(Sock, Data) when is_port(Sock) -> gen_tcp:send(Sock, Data). -close(Sock) when ?IS_SSL(Sock) -> ssl:close(Sock#ssl_socket.ssl); +close(Sock) when ?IS_SSL(Sock) -> ssl:close(Sock); close(Sock) when is_port(Sock) -> gen_tcp:close(Sock). 
fast_close(Sock) when ?IS_SSL(Sock) -> @@ -173,7 +192,7 @@ fast_close(Sock) when ?IS_SSL(Sock) -> %% 0), which may never return if the client doesn't send a FIN or %% that gets swallowed by the network. Since there is no timeout %% variant of ssl:close, we construct our own. - {Pid, MRef} = spawn_monitor(fun () -> ssl:close(Sock#ssl_socket.ssl) end), + {Pid, MRef} = spawn_monitor(fun () -> ssl:close(Sock) end), erlang:send_after(?SSL_CLOSE_TIMEOUT, self(), {Pid, ssl_close_timeout}), receive {Pid, ssl_close_timeout} -> @@ -182,18 +201,18 @@ fast_close(Sock) when ?IS_SSL(Sock) -> {'DOWN', MRef, process, Pid, _Reason} -> ok end, - catch port_close(Sock#ssl_socket.tcp), + catch port_close(ssl_get_socket(Sock)), ok; fast_close(Sock) when is_port(Sock) -> catch port_close(Sock), ok. -sockname(Sock) when ?IS_SSL(Sock) -> ssl:sockname(Sock#ssl_socket.ssl); +sockname(Sock) when ?IS_SSL(Sock) -> ssl:sockname(Sock); sockname(Sock) when is_port(Sock) -> inet:sockname(Sock). -peername(Sock) when ?IS_SSL(Sock) -> ssl:peername(Sock#ssl_socket.ssl); +peername(Sock) when ?IS_SSL(Sock) -> ssl:peername(Sock); peername(Sock) when is_port(Sock) -> inet:peername(Sock). -peercert(Sock) when ?IS_SSL(Sock) -> ssl:peercert(Sock#ssl_socket.ssl); +peercert(Sock) when ?IS_SSL(Sock) -> ssl:peercert(Sock); peercert(Sock) when is_port(Sock) -> nossl. connection_string(Sock, Direction) -> @@ -244,3 +263,19 @@ is_loopback({0,0,0,0,0,65535,AB,CD}) -> is_loopback(ipv4(AB, CD)); is_loopback(_) -> false. ipv4(AB, CD) -> {AB bsr 8, AB band 255, CD bsr 8, CD band 255}. + +accept_ack(Ref, Sock) -> + ok = ranch:accept_ack(Ref), + case tune_buffer_size(Sock) of + ok -> ok; + {error, _} -> rabbit_net:fast_close(Sock), + exit(normal) + end, + ok = file_handle_cache:obtain(). + +tune_buffer_size(Sock) -> + case getopts(Sock, [sndbuf, recbuf, buffer]) of + {ok, BufSizes} -> BufSz = lists:max([Sz || {_Opt, Sz} <- BufSizes]), + setopts(Sock, [{buffer, BufSz}]); + Error -> Error + end. 
diff --git a/rabbitmq-server/src/rabbit_networking.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_networking.erl similarity index 79% rename from rabbitmq-server/src/rabbit_networking.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_networking.erl index f95f8c5..47309cc 100644 --- a/rabbitmq-server/src/rabbit_networking.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_networking.erl @@ -11,25 +11,36 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_networking). --export([boot/0, start/0, start_tcp_listener/1, start_ssl_listener/2, +%% This module contains various functions that deal with networking, +%% TCP and TLS listeners, and connection information. +%% +%% It also contains a boot step — boot/0 — that starts networking machinery. +%% This module primarily covers AMQP 0-9-1 but some bits are reused in +%% plugins that provide protocol support, e.g. STOMP or MQTT. +%% +%% Functions in this module take care of normalising TCP listener options, +%% including dual IP stack cases, and starting the AMQP 0-9-1 listener(s). +%% +%% See also tcp_listener_sup and tcp_listener. + +-export([boot/0, start_tcp_listener/2, start_ssl_listener/3, stop_tcp_listener/1, on_node_down/1, active_listeners/0, node_listeners/1, register_connection/1, unregister_connection/1, connections/0, connection_info_keys/0, connection_info/1, connection_info/2, - connection_info_all/0, connection_info_all/1, + connection_info_all/0, connection_info_all/1, connection_info_all/3, close_connection/2, force_connection_event_refresh/1, tcp_host/1]). -%%used by TCP-based transports, e.g. STOMP adapter --export([tcp_listener_addresses/1, tcp_listener_spec/6, - ensure_ssl/0, fix_ssl_options/1, poodle_check/1, ssl_transform_fun/1]). +%% Used by TCP-based transports, e.g. 
STOMP adapter +-export([tcp_listener_addresses/1, tcp_listener_spec/9, + ensure_ssl/0, fix_ssl_options/1, poodle_check/1]). --export([tcp_listener_started/3, tcp_listener_stopped/3, - start_client/1, start_ssl_client/2]). +-export([tcp_listener_started/3, tcp_listener_stopped/3]). %% Internal -export([connections_local/0]). @@ -62,7 +73,6 @@ -type(protocol() :: atom()). -type(label() :: string()). --spec(start/0 :: () -> 'ok'). -spec(start_tcp_listener/1 :: (listener_config()) -> 'ok'). -spec(start_ssl_listener/2 :: (listener_config(), rabbit_types:infos()) -> 'ok'). @@ -82,29 +92,21 @@ -spec(connection_info_all/0 :: () -> [rabbit_types:infos()]). -spec(connection_info_all/1 :: (rabbit_types:info_keys()) -> [rabbit_types:infos()]). +-spec(connection_info_all/3 :: + (rabbit_types:info_keys(), reference(), pid()) -> 'ok'). -spec(close_connection/2 :: (pid(), string()) -> 'ok'). -spec(force_connection_event_refresh/1 :: (reference()) -> 'ok'). -spec(on_node_down/1 :: (node()) -> 'ok'). -spec(tcp_listener_addresses/1 :: (listener_config()) -> [address()]). --spec(tcp_listener_spec/6 :: - (name_prefix(), address(), [gen_tcp:listen_option()], protocol(), - label(), rabbit_types:mfargs()) -> supervisor:child_spec()). +-spec(tcp_listener_spec/9 :: + (name_prefix(), address(), [gen_tcp:listen_option()], module(), module(), protocol(), any(), + non_neg_integer(), label()) -> supervisor:child_spec()). -spec(ensure_ssl/0 :: () -> rabbit_types:infos()). -spec(fix_ssl_options/1 :: (rabbit_types:infos()) -> rabbit_types:infos()). -spec(poodle_check/1 :: (atom()) -> 'ok' | 'danger'). --spec(ssl_transform_fun/1 :: - (rabbit_types:infos()) - -> fun ((rabbit_net:socket()) - -> rabbit_types:ok_or_error(#ssl_socket{}))). -spec(boot/0 :: () -> 'ok'). --spec(start_client/1 :: - (port() | #ssl_socket{ssl::{'sslsocket',_,_}}) -> - atom() | pid() | port() | {atom(),atom()}). 
--spec(start_ssl_client/2 :: - (_,port() | #ssl_socket{ssl::{'sslsocket',_,_}}) -> - atom() | pid() | port() | {atom(),atom()}). -spec(tcp_listener_started/3 :: (_, string() | @@ -126,33 +128,28 @@ boot() -> ok = record_distribution_listener(), - ok = start(), - ok = boot_tcp(), - ok = boot_ssl(). + _ = application:start(ranch), + ok = boot_tcp(application:get_env(rabbit, num_tcp_acceptors, 10)), + ok = boot_ssl(application:get_env(rabbit, num_ssl_acceptors, 1)). -boot_tcp() -> +boot_tcp(NumAcceptors) -> {ok, TcpListeners} = application:get_env(tcp_listeners), - [ok = start_tcp_listener(Listener) || Listener <- TcpListeners], + [ok = start_tcp_listener(Listener, NumAcceptors) || Listener <- TcpListeners], ok. -boot_ssl() -> +boot_ssl(NumAcceptors) -> case application:get_env(ssl_listeners) of {ok, []} -> ok; {ok, SslListeners} -> SslOpts = ensure_ssl(), case poodle_check('AMQP') of - ok -> [start_ssl_listener(L, SslOpts) || L <- SslListeners]; + ok -> [start_ssl_listener(L, SslOpts, NumAcceptors) || L <- SslListeners]; danger -> ok end, ok end. -start() -> rabbit_sup:start_supervisor_child( - rabbit_tcp_client_sup, rabbit_client_sup, - [{local, rabbit_tcp_client_sup}, - {rabbit_connection_sup,start_link,[]}]). - ensure_ssl() -> {ok, SslAppsConfig} = application:get_env(rabbit, ssl_apps), ok = app_utils:start_applications(SslAppsConfig), @@ -269,35 +266,6 @@ fix_ssl_protocol_versions(Config) -> pset(versions, Configured -- ?BAD_SSL_PROTOCOL_VERSIONS, Config) end. -ssl_timeout() -> - {ok, Val} = application:get_env(rabbit, ssl_handshake_timeout), - Val. 
- -ssl_transform_fun(SslOpts) -> - fun (Sock) -> - Timeout = ssl_timeout(), - case catch ssl:ssl_accept(Sock, SslOpts, Timeout) of - {ok, SslSock} -> - {ok, #ssl_socket{tcp = Sock, ssl = SslSock}}; - {error, timeout} -> - {error, {ssl_upgrade_error, timeout}}; - {error, Reason} -> - %% We have no idea what state the ssl_connection - %% process is in - it could still be happily - %% going, it might be stuck, or it could be just - %% about to fail. There is little that our caller - %% can do but close the TCP socket, but this could - %% cause ssl alerts to get dropped (which is bad - %% form, according to the TLS spec). So we give - %% the ssl_connection a little bit of time to send - %% such alerts. - timer:sleep(Timeout), - {error, {ssl_upgrade_error, Reason}}; - {'EXIT', Reason} -> - {error, {ssl_upgrade_failure, Reason}} - end - end. - tcp_listener_addresses(Port) when is_integer(Port) -> tcp_listener_addresses_auto(Port); tcp_listener_addresses({"auto", Port}) -> @@ -319,31 +287,34 @@ tcp_listener_addresses_auto(Port) -> Listener <- port_to_listeners(Port)]). tcp_listener_spec(NamePrefix, {IPAddress, Port, Family}, SocketOpts, - Protocol, Label, OnConnect) -> + Transport, ProtoSup, ProtoOpts, Protocol, NumAcceptors, Label) -> {rabbit_misc:tcp_name(NamePrefix, IPAddress, Port), {tcp_listener_sup, start_link, - [IPAddress, Port, [Family | SocketOpts], + [IPAddress, Port, Transport, [Family | SocketOpts], ProtoSup, ProtoOpts, {?MODULE, tcp_listener_started, [Protocol]}, {?MODULE, tcp_listener_stopped, [Protocol]}, - OnConnect, Label]}, + NumAcceptors, Label]}, transient, infinity, supervisor, [tcp_listener_sup]}. -start_tcp_listener(Listener) -> - start_listener(Listener, amqp, "TCP Listener", - {?MODULE, start_client, []}). +start_tcp_listener(Listener, NumAcceptors) -> + start_listener(Listener, NumAcceptors, amqp, "TCP Listener", tcp_opts()). 
-start_ssl_listener(Listener, SslOpts) -> - start_listener(Listener, 'amqp/ssl', "SSL Listener", - {?MODULE, start_ssl_client, [SslOpts]}). +start_ssl_listener(Listener, SslOpts, NumAcceptors) -> + start_listener(Listener, NumAcceptors, 'amqp/ssl', "SSL Listener", tcp_opts() ++ SslOpts). -start_listener(Listener, Protocol, Label, OnConnect) -> - [start_listener0(Address, Protocol, Label, OnConnect) || +start_listener(Listener, NumAcceptors, Protocol, Label, Opts) -> + [start_listener0(Address, NumAcceptors, Protocol, Label, Opts) || Address <- tcp_listener_addresses(Listener)], ok. -start_listener0(Address, Protocol, Label, OnConnect) -> - Spec = tcp_listener_spec(rabbit_tcp_listener_sup, Address, tcp_opts(), - Protocol, Label, OnConnect), +start_listener0(Address, NumAcceptors, Protocol, Label, Opts) -> + Transport = case Protocol of + amqp -> ranch_tcp; + 'amqp/ssl' -> ranch_ssl + end, + Spec = tcp_listener_spec(rabbit_tcp_listener_sup, Address, Opts, + Transport, rabbit_connection_sup, [], Protocol, + NumAcceptors, Label), case supervisor:start_child(rabbit_sup, Spec) of {ok, _} -> ok; {error, {shutdown, _}} -> {IPAddress, Port, _Family} = Address, @@ -400,28 +371,6 @@ on_node_down(Node) -> "Keep ~s listeners: the node is already back~n", [Node]) end. -start_client(Sock, SockTransform) -> - {ok, _Child, Reader} = supervisor:start_child(rabbit_tcp_client_sup, []), - ok = rabbit_net:controlling_process(Sock, Reader), - Reader ! {go, Sock, SockTransform}, - - %% In the event that somebody floods us with connections, the - %% reader processes can spew log events at error_logger faster - %% than it can keep up, causing its mailbox to grow unbounded - %% until we eat all the memory available and crash. So here is a - %% meaningless synchronous call to the underlying gen_event - %% mechanism. When it returns the mailbox is drained, and we - %% return to our caller to accept more connetions. - gen_event:which_handlers(error_logger), - - Reader. 
- -start_client(Sock) -> - start_client(Sock, fun (S) -> {ok, S} end). - -start_ssl_client(SslOpts, Sock) -> - start_client(Sock, ssl_transform_fun(SslOpts)). - register_connection(Pid) -> pg_local:join(rabbit_connections, Pid). unregister_connection(Pid) -> pg_local:leave(rabbit_connections, Pid). @@ -440,6 +389,11 @@ connection_info(Pid, Items) -> rabbit_reader:info(Pid, Items). connection_info_all() -> cmap(fun (Q) -> connection_info(Q) end). connection_info_all(Items) -> cmap(fun (Q) -> connection_info(Q, Items) end). +connection_info_all(Items, Ref, AggregatorPid) -> + rabbit_control_misc:emitting_map_with_exit_handler( + AggregatorPid, Ref, fun(Q) -> connection_info(Q, Items) end, + connections()). + close_connection(Pid, Explanation) -> rabbit_log:info("Closing connection ~p because ~p~n", [Pid, Explanation]), case lists:member(Pid, connections()) of @@ -476,21 +430,7 @@ cmap(F) -> rabbit_misc:filter_exit_map(F, connections()). tcp_opts() -> {ok, ConfigOpts} = application:get_env(rabbit, tcp_listen_options), - merge_essential_tcp_listen_options(ConfigOpts). - --define(ESSENTIAL_LISTEN_OPTIONS, - [binary, - {active, false}, - {packet, raw}, - {reuseaddr, true}, - {nodelay, true}]). - -merge_essential_tcp_listen_options(Opts) -> - lists:foldl(fun ({K, _} = Opt, Acc) -> - lists:keystore(K, 1, Acc, Opt); - (Opt, Acc) -> - [Opt | Acc] - end , Opts, ?ESSENTIAL_LISTEN_OPTIONS). + ConfigOpts. %% inet_parse:address takes care of ip string, like "0.0.0.0" %% inet:getaddr returns immediately for ip tuple {0,0,0,0}, diff --git a/rabbitmq-server/src/rabbit_nodes.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_nodes.erl similarity index 96% rename from rabbitmq-server/src/rabbit_nodes.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_nodes.erl index 090aacc..b1370db 100644 --- a/rabbitmq-server/src/rabbit_nodes.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_nodes.erl @@ -11,14 +11,15 @@ %% The Original Code is RabbitMQ. 
%% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_nodes). -export([names/1, diagnostics/1, make/1, parts/1, cookie_hash/0, is_running/2, is_process_running/2, - cluster_name/0, set_cluster_name/1, ensure_epmd/0]). + cluster_name/0, set_cluster_name/1, ensure_epmd/0, + all_running/0]). -include_lib("kernel/include/inet.hrl"). @@ -42,6 +43,7 @@ -spec(cluster_name/0 :: () -> binary()). -spec(set_cluster_name/1 :: (binary()) -> 'ok'). -spec(ensure_epmd/0 :: () -> 'ok'). +-spec(all_running/0 :: () -> [node()]). -endif. @@ -202,7 +204,7 @@ set_cluster_name(Name) -> ensure_epmd() -> {ok, Prog} = init:get_argument(progname), - ID = random:uniform(1000000000), + ID = rabbit_misc:random(1000000000), Port = open_port( {spawn_executable, os:find_executable(Prog)}, [{args, ["-sname", rabbit_misc:format("epmd-starter-~b", [ID]), @@ -215,3 +217,5 @@ port_shutdown_loop(Port) -> {Port, {exit_status, _Rc}} -> ok; {Port, _} -> port_shutdown_loop(Port) end. + +all_running() -> rabbit_mnesia:cluster_nodes(running). diff --git a/rabbitmq-server/deps/rabbit_common/src/rabbit_password_hashing.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_password_hashing.erl new file mode 100644 index 0000000..54d2535 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_password_hashing.erl @@ -0,0 +1,33 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and +%% limitations under the License. 
+%% +%% The Original Code is RabbitMQ. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_password_hashing). +-include("rabbit.hrl"). + +-ifdef(use_specs). + +-callback hash(rabbit_types:password()) -> rabbit_types:password_hash(). + +-else. + +-export([behaviour_info/1]). + +behaviour_info(callbacks) -> + [{hash, 1}]; +behaviour_info(_Other) -> + undefined. + +-endif. diff --git a/rabbitmq-server/src/rabbit_policy_validator.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_policy_validator.erl similarity index 94% rename from rabbitmq-server/src/rabbit_policy_validator.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_policy_validator.erl index 7ebea83..bd89060 100644 --- a/rabbitmq-server/src/rabbit_policy_validator.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_policy_validator.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_policy_validator). diff --git a/rabbitmq-server/src/rabbit_queue_collector.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_queue_collector.erl similarity index 94% rename from rabbitmq-server/src/rabbit_queue_collector.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_queue_collector.erl index 734228b..f6677cb 100644 --- a/rabbitmq-server/src/rabbit_queue_collector.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_queue_collector.erl @@ -11,11 +11,14 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_queue_collector). 
+%% Queue collector keeps track of exclusive queues and cleans them +%% up e.g. when their connection is closed. + -behaviour(gen_server). -export([start_link/1, register/2, delete_all/1]). diff --git a/rabbitmq-server/src/rabbit_queue_decorator.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_queue_decorator.erl similarity index 94% rename from rabbitmq-server/src/rabbit_queue_decorator.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_queue_decorator.erl index 129f51d..aab2812 100644 --- a/rabbitmq-server/src/rabbit_queue_decorator.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_queue_decorator.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_queue_decorator). @@ -42,7 +42,7 @@ -export([behaviour_info/1]). behaviour_info(callbacks) -> - [{description, 0}, {startup, 1}, {shutdown, 1}, {policy_changed, 2}, + [{startup, 1}, {shutdown, 1}, {policy_changed, 2}, {active_for, 1}, {consumer_state_changed, 3}]; behaviour_info(_Other) -> undefined. diff --git a/rabbitmq-server/deps/rabbit_common/src/rabbit_queue_master_locator.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_queue_master_locator.erl new file mode 100644 index 0000000..a73a307 --- /dev/null +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_queue_master_locator.erl @@ -0,0 +1,33 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License at +%% http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. 
See the +%% License for the specific language governing rights and limitations +%% under the License. +%% +%% The Original Code is RabbitMQ. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_queue_master_locator). + +-ifdef(use_specs). + +-callback description() -> [proplists:property()]. +-callback queue_master_location(pid()) -> {'ok', node()} | {'error', term()}. + +-else. + +-export([behaviour_info/1]). +behaviour_info(callbacks) -> + [{description, 0}, + {queue_master_location, 1}]; +behaviour_info(_Other) -> + undefined. + +-endif. diff --git a/rabbitmq-server/src/rabbit_reader.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_reader.erl similarity index 81% rename from rabbitmq-server/src/rabbit_reader.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_reader.erl index 8812e1d..73513f9 100644 --- a/rabbitmq-server/src/rabbit_reader.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_reader.erl @@ -11,40 +11,146 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_reader). + +%% This is an AMQP 0-9-1 connection implementation. If AMQP 1.0 plugin is enabled, +%% this module passes control of incoming AMQP 1.0 connections to it. +%% +%% Every connection (as in, a process using this module) +%% is a controlling process for a server socket. +%% +%% Connections have a number of responsibilities: +%% +%% * Performing protocol handshake +%% * Parsing incoming data and dispatching protocol methods +%% * Authenticating clients (with the help of authentication backends) +%% * Enforcing TCP backpressure (throttling clients) +%% * Enforcing connection limits, e.g. 
channel_max +%% * Channel management +%% * Setting up heartbeater and alarm notifications +%% * Emitting connection and network activity metric events +%% * Gracefully handling client disconnects, channel termination, etc +%% +%% and a few more. +%% +%% Every connection has +%% +%% * a queue collector which is responsible for keeping +%% track of exclusive queues on the connection and their cleanup. +%% * a heartbeater that's responsible for sending heartbeat frames to clients, +%% keeping track of the incoming ones and notifying connection about +%% heartbeat timeouts +%% * Stats timer, a timer that is used to periodically emit metric events +%% +%% Some dependencies are started under a separate supervisor to avoid deadlocks +%% during system shutdown. See rabbit_channel_sup:start_link/0 for details. +%% +%% Reader processes are special processes (in the OTP sense). + -include("rabbit_framing.hrl"). -include("rabbit.hrl"). --export([start_link/1, info_keys/0, info/1, info/2, force_event_refresh/2, +-export([start_link/3, info_keys/0, info/1, info/2, force_event_refresh/2, shutdown/2]). -export([system_continue/3, system_terminate/4, system_code_change/4]). --export([init/2, mainloop/4, recvloop/4]). +-export([init/4, mainloop/4, recvloop/4]). -export([conserve_resources/3, server_properties/1]). -define(NORMAL_TIMEOUT, 3). -define(CLOSING_TIMEOUT, 30). -define(CHANNEL_TERMINATION_TIMEOUT, 3). +%% we wait for this many seconds before closing TCP connection +%% with a client that failed to log in. Provides some relief +%% from connection storms and DoS. -define(SILENT_CLOSE_DELAY, 3). -define(CHANNEL_MIN, 1). %%-------------------------------------------------------------------------- --record(v1, {parent, sock, connection, callback, recv_len, pending_recv, - connection_state, helper_sup, queue_collector, heartbeater, - stats_timer, channel_sup_sup_pid, channel_count, throttle}). 
- --record(connection, {name, host, peer_host, port, peer_port, - protocol, user, timeout_sec, frame_max, channel_max, vhost, - client_properties, capabilities, - auth_mechanism, auth_state, connected_at}). - --record(throttle, {alarmed_by, last_blocked_by, last_blocked_at}). +-record(v1, { + %% parent process + parent, + %% socket + sock, + %% connection state, see connection record + connection, + callback, + recv_len, + pending_recv, + %% pre_init | securing | running | blocking | blocked | closing | closed | {become, F} + connection_state, + %% see comment in rabbit_connection_sup:start_link/0 + helper_sup, + %% takes care of cleaning up exclusive queues, + %% see rabbit_queue_collector + queue_collector, + %% sends and receives heartbeat frames, + %% see rabbit_heartbeat + heartbeater, + %% timer used to emit statistics + stats_timer, + %% channel supervisor + channel_sup_sup_pid, + %% how many channels this connection has + channel_count, + %% throttling state, for both + %% credit- and resource-driven flow control + throttle}). + +-record(connection, { + %% e.g. <<"127.0.0.1:55054 -> 127.0.0.1:5672">> + name, + %% server host + host, + %% client host + peer_host, + %% server port + port, + %% client port + peer_port, + %% protocol framing implementation module, + %% e.g. rabbit_framing_amqp_0_9_1 + protocol, + user, + %% heartbeat timeout value used, 0 means + %% heartbeats are disabled + timeout_sec, + %% maximum allowed frame size, + %% see frame_max in the AMQP 0-9-1 spec + frame_max, + %% greatest channel number allowed, + %% see channel_max in the AMQP 0-9-1 spec + channel_max, + vhost, + %% client name, version, platform, etc + client_properties, + %% what lists protocol extensions + %% does this client support? 
+ capabilities, + %% authentication mechanism used + %% as a pair of {Name, Module} + auth_mechanism, + %% authentication mechanism state, + %% initialised by rabbit_auth_mechanism:init/1 + %% implementations + auth_state, + %% time of connection + connected_at}). + +-record(throttle, { + %% list of active alarms + alarmed_by, + %% flow | resource + last_blocked_by, + %% never | timestamp() + last_blocked_at +}). -define(STATISTICS_KEYS, [pid, recv_oct, recv_cnt, send_oct, send_cnt, send_pend, state, channels]). @@ -76,23 +182,23 @@ -ifdef(use_specs). --spec(start_link/1 :: (pid()) -> rabbit_types:ok(pid())). +-spec(start_link/3 :: (pid(), any(), rabbit_net:socket()) -> rabbit_types:ok(pid())). -spec(info_keys/0 :: () -> rabbit_types:info_keys()). -spec(info/1 :: (pid()) -> rabbit_types:infos()). -spec(info/2 :: (pid(), rabbit_types:info_keys()) -> rabbit_types:infos()). -spec(force_event_refresh/2 :: (pid(), reference()) -> 'ok'). -spec(shutdown/2 :: (pid(), string()) -> 'ok'). --spec(conserve_resources/3 :: (pid(), atom(), boolean()) -> 'ok'). +-type(resource_alert() :: {WasAlarmSetForNode :: boolean(), + IsThereAnyAlarmsWithSameSourceInTheCluster :: boolean(), + NodeForWhichAlarmWasSetOrCleared :: node()}). +-spec(conserve_resources/3 :: (pid(), atom(), resource_alert()) -> 'ok'). -spec(server_properties/1 :: (rabbit_types:protocol()) -> rabbit_framing:amqp_table()). %% These specs only exists to add no_return() to keep dialyzer happy --spec(init/2 :: (pid(), pid()) -> no_return()). --spec(start_connection/5 :: - (pid(), pid(), any(), rabbit_net:socket(), - fun ((rabbit_net:socket()) -> - rabbit_types:ok_or_error2( - rabbit_net:socket(), any()))) -> no_return()). +-spec(init/4 :: (pid(), pid(), any(), rabbit_net:socket()) -> no_return()). +-spec(start_connection/4 :: + (pid(), pid(), any(), rabbit_net:socket()) -> no_return()). -spec(mainloop/4 :: (_,[binary()], non_neg_integer(), #v1{}) -> any()). -spec(system_code_change/4 :: (_,_,_,_) -> {'ok',_}). 
@@ -104,18 +210,27 @@ %%-------------------------------------------------------------------------- -start_link(HelperSup) -> - {ok, proc_lib:spawn_link(?MODULE, init, [self(), HelperSup])}. +start_link(HelperSup, Ref, Sock) -> + Pid = proc_lib:spawn_link(?MODULE, init, [self(), HelperSup, Ref, Sock]), + + %% In the event that somebody floods us with connections, the + %% reader processes can spew log events at error_logger faster + %% than it can keep up, causing its mailbox to grow unbounded + %% until we eat all the memory available and crash. So here is a + %% meaningless synchronous call to the underlying gen_event + %% mechanism. When it returns the mailbox is drained, and we + %% return to our caller to accept more connections. + gen_event:which_handlers(error_logger), + + {ok, Pid}. shutdown(Pid, Explanation) -> gen_server:call(Pid, {shutdown, Explanation}, infinity). -init(Parent, HelperSup) -> +init(Parent, HelperSup, Ref, Sock) -> + rabbit_net:accept_ack(Ref, Sock), Deb = sys:debug_options([]), - receive - {go, Sock, SockTransform} -> - start_connection(Parent, HelperSup, Deb, Sock, SockTransform) - end. + start_connection(Parent, HelperSup, Deb, Sock). system_continue(Parent, Deb, {Buf, BufLen, State}) -> mainloop(Deb, Buf, BufLen, State#v1{parent = Parent}). @@ -140,7 +255,7 @@ info(Pid, Items) -> force_event_refresh(Pid, Ref) -> gen_server:cast(Pid, {force_event_refresh, Ref}). -conserve_resources(Pid, Source, Conserve) -> +conserve_resources(Pid, Source, {_, Conserve, _}) -> Pid ! {conserve_resources, Source, Conserve}, ok. @@ -184,7 +299,8 @@ server_capabilities(rabbit_framing_amqp_0_9_1) -> {<<"connection.blocked">>, bool, true}, {<<"consumer_priorities">>, bool, true}, {<<"authentication_failure_close">>, bool, true}, - {<<"per_consumer_qos">>, bool, true}]; + {<<"per_consumer_qos">>, bool, true}, + {<<"direct_reply_to">>, bool, true}]; server_capabilities(_) -> []. 
@@ -214,12 +330,11 @@ socket_op(Sock, Fun) -> case Fun(Sock) of {ok, Res} -> Res; {error, Reason} -> socket_error(Reason), - %% NB: this is tcp socket, even in case of ssl rabbit_net:fast_close(Sock), exit(normal) end. -start_connection(Parent, HelperSup, Deb, Sock, SockTransform) -> +start_connection(Parent, HelperSup, Deb, Sock) -> process_flag(trap_exit, true), Name = case rabbit_net:connection_string(Sock, inbound) of {ok, Str} -> Str; @@ -230,13 +345,13 @@ start_connection(Parent, HelperSup, Deb, Sock, SockTransform) -> exit(normal) end, {ok, HandshakeTimeout} = application:get_env(rabbit, handshake_timeout), - ClientSock = socket_op(Sock, SockTransform), + InitialFrameMax = application:get_env(rabbit, initial_frame_max, ?FRAME_MIN_SIZE), erlang:send_after(HandshakeTimeout, self(), handshake_timeout), {PeerHost, PeerPort, Host, Port} = socket_op(Sock, fun (S) -> rabbit_net:socket_ends(S, inbound) end), ?store_proc_name(list_to_binary(Name)), State = #v1{parent = Parent, - sock = ClientSock, + sock = Sock, connection = #connection{ name = list_to_binary(Name), host = Host, @@ -246,13 +361,14 @@ start_connection(Parent, HelperSup, Deb, Sock, SockTransform) -> protocol = none, user = none, timeout_sec = (HandshakeTimeout / 1000), - frame_max = ?FRAME_MIN_SIZE, + frame_max = InitialFrameMax, vhost = none, client_properties = none, capabilities = [], auth_mechanism = none, auth_state = none, - connected_at = rabbit_misc:now_to_ms(os:timestamp())}, + connected_at = time_compat:os_system_time( + milli_seconds)}, callback = uninitialized_callback, recv_len = 0, pending_recv = false, @@ -283,27 +399,30 @@ start_connection(Parent, HelperSup, Deb, Sock, SockTransform) -> %% the socket. However, to keep the file_handle_cache %% accounting as accurate as possible we ought to close the %% socket w/o delay before termination. 
- rabbit_net:fast_close(ClientSock), + rabbit_net:fast_close(Sock), rabbit_networking:unregister_connection(self()), rabbit_event:notify(connection_closed, [{pid, self()}]) end, done. log_connection_exception(Name, Ex) -> - Severity = case Ex of - connection_closed_with_no_data_received -> debug; - connection_closed_abruptly -> warning; - _ -> error - end, - log_connection_exception(Severity, Name, Ex). + Severity = case Ex of + connection_closed_with_no_data_received -> debug; + connection_closed_abruptly -> warning; + _ -> error + end, + log_connection_exception(Severity, Name, Ex). log_connection_exception(Severity, Name, {heartbeat_timeout, TimeoutSec}) -> - %% Long line to avoid extra spaces and line breaks in log - log(Severity, "closing AMQP connection ~p (~s):~nMissed heartbeats from client, timeout: ~ps~n", - [self(), Name, TimeoutSec]); + %% Long line to avoid extra spaces and line breaks in log + log(Severity, "closing AMQP connection ~p (~s):~nmissed heartbeats from client, timeout: ~ps~n", + [self(), Name, TimeoutSec]); +log_connection_exception(Severity, Name, connection_closed_abruptly) -> + log(Severity, "closing AMQP connection ~p (~s):~nclient unexpectedly closed TCP connection~n", + [self(), Name]); log_connection_exception(Severity, Name, Ex) -> - log(Severity, "closing AMQP connection ~p (~s):~n~p~n", - [self(), Name, Ex]). + log(Severity, "closing AMQP connection ~p (~s):~n~p~n", + [self(), Name, Ex]). run({M, F, A}) -> try apply(M, F, A) @@ -371,6 +490,10 @@ mainloop(Deb, Buf, BufLen, State = #v1{sock = Sock, stop(tcp_healthcheck, State); closed -> stop(closed, State); + {other, {heartbeat_send_error, Reason}} -> + %% The only portable way to detect disconnect on blocked + %% connection is to wait for heartbeat send failure. 
+ stop(Reason, State); {error, Reason} -> stop(Reason, State); {other, {system, From, Request}} -> @@ -516,7 +639,8 @@ maybe_block(State = #v1{connection_state = blocking, State1 = State#v1{connection_state = blocked, throttle = update_last_blocked_by( Throttle#throttle{ - last_blocked_at = erlang:now()})}, + last_blocked_at = + time_compat:monotonic_time()})}, case {blocked_by_alarm(State), blocked_by_alarm(State1)} of {false, true} -> ok = send_blocked(State1); {_, _} -> ok @@ -572,7 +696,7 @@ close_connection(State = #v1{queue_collector = Collector, %% current connection, and are deleted when that connection %% closes." This does not strictly imply synchrony, but in %% practice it seems to be what people assume. - rabbit_queue_collector:delete_all(Collector), + clean_up_exclusive_queues(Collector), %% We terminate the connection after the specified interval, but %% no later than ?CLOSING_TIMEOUT seconds. erlang:send_after((if TimeoutSec > 0 andalso @@ -581,6 +705,15 @@ close_connection(State = #v1{queue_collector = Collector, end) * 1000, self(), terminate_connection), State#v1{connection_state = closed}. +%% queue collector will be undefined when connection +%% tuning was never performed or didn't finish. In such cases +%% there's also nothing to clean up. +clean_up_exclusive_queues(undefined) -> + ok; + +clean_up_exclusive_queues(Collector) -> + rabbit_queue_collector:delete_all(Collector). 
+ handle_dependent_exit(ChPid, Reason, State) -> {Channel, State1} = channel_cleanup(ChPid, State), case {Channel, termination_kind(Reason)} of @@ -613,7 +746,8 @@ wait_for_channel_termination(N, TimerRef, connection = #connection{ name = ConnName, user = User, - vhost = VHost}}) -> + vhost = VHost}, + sock = Sock}) -> receive {'DOWN', _MRef, process, ChPid, Reason} -> {Channel, State1} = channel_cleanup(ChPid, State), @@ -630,6 +764,9 @@ wait_for_channel_termination(N, TimerRef, CS, Channel, Reason]), wait_for_channel_termination(N-1, TimerRef, State1) end; + {'EXIT', Sock, _Reason} -> + [channel_cleanup(ChPid, State) || ChPid <- all_channels()], + exit(normal); cancel_wait -> exit(channel_termination_timeout) end. @@ -647,6 +784,14 @@ maybe_close(State) -> termination_kind(normal) -> controlled; termination_kind(_) -> uncontrolled. +format_hard_error(#amqp_error{name = N, explanation = E, method = M}) -> + io_lib:format("operation ~s caused a connection exception ~s: ~p", [M, N, E]); +format_hard_error(Reason) -> + case io_lib:deep_char_list(Reason) of + true -> Reason; + false -> rabbit_misc:format("~p", [Reason]) + end. + log_hard_error(#v1{connection_state = CS, connection = #connection{ name = ConnName, @@ -654,8 +799,8 @@ log_hard_error(#v1{connection_state = CS, vhost = VHost}}, Channel, Reason) -> log(error, "Error on AMQP connection ~p (~s, vhost: '~s'," - " user: '~s', state: ~p), channel ~p:~n~p~n", - [self(), ConnName, VHost, User#user.username, CS, Channel, Reason]). + " user: '~s', state: ~p), channel ~p:~n~s~n", + [self(), ConnName, VHost, User#user.username, CS, Channel, format_hard_error(Reason)]). 
handle_exception(State = #v1{connection_state = closed}, Channel, Reason) -> log_hard_error(State, Channel, Reason), @@ -664,12 +809,55 @@ handle_exception(State = #v1{connection = #connection{protocol = Protocol}, connection_state = CS}, Channel, Reason) when ?IS_RUNNING(State) orelse CS =:= closing -> - log_hard_error(State, Channel, Reason), - {0, CloseMethod} = - rabbit_binary_generator:map_exception(Channel, Reason, Protocol), - State1 = close_connection(terminate_channels(State)), - ok = send_on_channel0(State1#v1.sock, CloseMethod, Protocol), - State1; + respond_and_close(State, Channel, Protocol, Reason, Reason); +%% authentication failure +handle_exception(State = #v1{connection = #connection{protocol = Protocol, + name = ConnName, + capabilities = Capabilities}, + connection_state = starting}, + Channel, Reason = #amqp_error{name = access_refused, + explanation = ErrMsg}) -> + log(error, + "Error on AMQP connection ~p (~s, state: ~p):~n~s~n", + [self(), ConnName, starting, ErrMsg]), + %% respect authentication failure notification capability + case rabbit_misc:table_lookup(Capabilities, + <<"authentication_failure_close">>) of + {bool, true} -> + send_error_on_channel0_and_close(Channel, Protocol, Reason, State); + _ -> + close_connection(terminate_channels(State)) + end; +%% when loopback-only user tries to connect from a non-local host +%% when user tries to access a vhost it has no permissions for +handle_exception(State = #v1{connection = #connection{protocol = Protocol, + name = ConnName, + user = User}, + connection_state = opening}, + Channel, Reason = #amqp_error{name = not_allowed, + explanation = ErrMsg}) -> + log(error, + "Error on AMQP connection ~p (~s, user: '~s', state: ~p):~n~s~n", + [self(), ConnName, User#user.username, opening, ErrMsg]), + send_error_on_channel0_and_close(Channel, Protocol, Reason, State); +handle_exception(State = #v1{connection = #connection{protocol = Protocol}, + connection_state = CS = opening}, + Channel, Reason = 
#amqp_error{}) -> + respond_and_close(State, Channel, Protocol, Reason, + {handshake_error, CS, Reason}); +%% when negotiation fails, e.g. due to channel_max being higher than the +%% maxiumum allowed limit +handle_exception(State = #v1{connection = #connection{protocol = Protocol, + name = ConnName, + user = User}, + connection_state = tuning}, + Channel, Reason = #amqp_error{name = not_allowed, + explanation = ErrMsg}) -> + log(error, + "Error on AMQP connection ~p (~s," + " user: '~s', state: ~p):~n~s~n", + [self(), ConnName, User#user.username, tuning, ErrMsg]), + send_error_on_channel0_and_close(Channel, Protocol, Reason, State); handle_exception(State, Channel, Reason) -> %% We don't trust the client at this point - force them to wait %% for a bit so they can't DOS us with repeated failed logins etc. @@ -981,8 +1169,19 @@ handle_method0(#'connection.tune_ok'{frame_max = FrameMax, {ok, Collector} = rabbit_connection_helper_sup:start_queue_collector( SupPid, Connection#connection.name), Frame = rabbit_binary_generator:build_heartbeat_frame(), - SendFun = fun() -> catch rabbit_net:send(Sock, Frame) end, Parent = self(), + SendFun = + fun() -> + case catch rabbit_net:send(Sock, Frame) of + ok -> + ok; + {error, Reason} -> + Parent ! {heartbeat_send_error, Reason}; + Unexpected -> + Parent ! {heartbeat_send_error, Unexpected} + end, + ok + end, ReceiveFun = fun() -> Parent ! 
heartbeat_timeout end, Heartbeater = rabbit_heartbeat:start( SupPid, Sock, Connection#connection.name, @@ -1044,7 +1243,7 @@ handle_method0(_Method, #v1{connection_state = S}) -> validate_negotiated_integer_value(Field, Min, ClientValue) -> ServerValue = get_env(Field), if ClientValue /= 0 andalso ClientValue < Min -> - fail_negotiation(Field, min, ServerValue, ClientValue); + fail_negotiation(Field, min, Min, ClientValue); ServerValue /= 0 andalso (ClientValue =:= 0 orelse ClientValue > ServerValue) -> fail_negotiation(Field, max, ServerValue, ClientValue); @@ -1202,7 +1401,9 @@ i(state, #v1{connection_state = ConnectionState, (credit_flow:blocked() %% throttled by flow now orelse %% throttled by flow recently (WasBlockedBy =:= flow andalso T =/= never andalso - timer:now_diff(erlang:now(), T) < 5000000)) of + time_compat:convert_time_unit(time_compat:monotonic_time() - T, + native, + micro_seconds) < 5000000)) of true -> flow; false -> ConnectionState end; @@ -1234,14 +1435,19 @@ socket_info(Get, Select, #v1{sock = Sock}) -> end. ssl_info(F, #v1{sock = Sock}) -> - %% The first ok form is R14 - %% The second is R13 - the extra term is exportability (by inspection, - %% the docs are wrong) case rabbit_net:ssl_info(Sock) of - nossl -> ''; - {error, _} -> ''; - {ok, {P, {K, C, H}}} -> F({P, {K, C, H}}); - {ok, {P, {K, C, H, _}}} -> F({P, {K, C, H}}) + nossl -> ''; + {error, _} -> ''; + {ok, Items} -> + P = proplists:get_value(protocol, Items), + CS = proplists:get_value(cipher_suite, Items), + %% The first form is R14. + %% The second is R13 - the extra term is exportability (by + %% inspection, the docs are wrong). + case CS of + {K, C, H} -> F({P, {K, C, H}}); + {K, C, H, _} -> F({P, {K, C, H}}) + end end. cert_info(F, #v1{sock = Sock}) -> @@ -1294,3 +1500,14 @@ pack_for_1_0(Buf, BufLen, #v1{parent = Parent, pending_recv = PendingRecv, helper_sup = SupPid}) -> {Parent, Sock, RecvLen, PendingRecv, SupPid, Buf, BufLen}. 
+ +respond_and_close(State, Channel, Protocol, Reason, LogErr) -> + log_hard_error(State, Channel, LogErr), + send_error_on_channel0_and_close(Channel, Protocol, Reason, State). + +send_error_on_channel0_and_close(Channel, Protocol, Reason, State) -> + {0, CloseMethod} = + rabbit_binary_generator:map_exception(Channel, Reason, Protocol), + State1 = close_connection(terminate_channels(State)), + ok = send_on_channel0(State#v1.sock, CloseMethod, Protocol), + State1. diff --git a/rabbitmq-server/src/rabbit_runtime_parameter.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_runtime_parameter.erl similarity index 95% rename from rabbitmq-server/src/rabbit_runtime_parameter.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_runtime_parameter.erl index 1d4bc0b..4e36068 100644 --- a/rabbitmq-server/src/rabbit_runtime_parameter.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_runtime_parameter.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_runtime_parameter). diff --git a/rabbitmq-server/src/rabbit_writer.erl b/rabbitmq-server/deps/rabbit_common/src/rabbit_writer.erl similarity index 90% rename from rabbitmq-server/src/rabbit_writer.erl rename to rabbitmq-server/deps/rabbit_common/src/rabbit_writer.erl index 7cba717..3153a96 100644 --- a/rabbitmq-server/src/rabbit_writer.erl +++ b/rabbitmq-server/deps/rabbit_common/src/rabbit_writer.erl @@ -11,10 +11,28 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_writer). + +%% This module backs writer processes ("writers"). 
The responsibility of +%% a writer is to serialise protocol methods and write them to the socket. +%% Every writer is associated with a channel and normally it's the channel +%% that delegates method delivery to it. However, rabbit_reader +%% (connection process) can use this module's functions to send data +%% on channel 0, which is only used for connection negotiation and +%% other "special" purposes. +%% +%% This module provides multiple functions that send protocol commands, +%% including some that are credit flow-aware. +%% +%% Writers perform internal buffering. When the amount of data +%% buffered exceeds a threshold, a socket flush is performed. +%% See FLUSH_THRESHOLD for details. +%% +%% When a socket write fails, writer will exit. + -include("rabbit.hrl"). -include("rabbit_framing.hrl"). @@ -32,8 +50,23 @@ %% internal -export([enter_mainloop/2, mainloop/2, mainloop1/2]). --record(wstate, {sock, channel, frame_max, protocol, reader, - stats_timer, pending}). +-record(wstate, { + %% socket (port) + sock, + %% channel number + channel, + %% connection-negotiated frame_max setting + frame_max, + %% see #connection.protocol in rabbit_reader + protocol, + %% connection (rabbit_reader) process + reader, + %% statistics emission timer + stats_timer, + %% data pending delivery (between socket + %% flushes) + pending +}). -define(HIBERNATE_AFTER, 5000). @@ -311,8 +344,11 @@ internal_send_command_async(MethodRecord, Content, rabbit_basic:maybe_gc_large_msg(Content), maybe_flush(State#wstate{pending = [Frames | Pending]}). +%% When the amount of protocol method data buffered exceeds +%% this threshold, a socket flush is performed. +%% %% This magic number is the tcp-over-ethernet MSS (1460) minus the -%% minimum size of a AMQP basic.deliver method frame (24) plus basic +%% minimum size of a AMQP 0-9-1 basic.deliver method frame (24) plus basic %% content header (22). The idea is that we want to flush just before %% exceeding the MSS. -define(FLUSH_THRESHOLD, 1414). 
diff --git a/rabbitmq-server/src/ssl_compat.erl b/rabbitmq-server/deps/rabbit_common/src/ssl_compat.erl similarity index 97% rename from rabbitmq-server/src/ssl_compat.erl rename to rabbitmq-server/deps/rabbit_common/src/ssl_compat.erl index fc83fbc..d9cf390 100644 --- a/rabbitmq-server/src/ssl_compat.erl +++ b/rabbitmq-server/deps/rabbit_common/src/ssl_compat.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(ssl_compat). diff --git a/rabbitmq-server/src/supervisor2.erl b/rabbitmq-server/deps/rabbit_common/src/supervisor2.erl similarity index 98% rename from rabbitmq-server/src/supervisor2.erl rename to rabbitmq-server/deps/rabbit_common/src/supervisor2.erl index 7b9421e..c8ffbb1 100644 --- a/rabbitmq-server/src/supervisor2.erl +++ b/rabbitmq-server/deps/rabbit_common/src/supervisor2.erl @@ -1492,7 +1492,7 @@ add_restart(State) -> I = State#state.intensity, P = State#state.period, R = State#state.restarts, - Now = erlang:now(), + Now = time_compat:monotonic_time(), R1 = add_restart([Now|R], Now, P), State1 = State#state{restarts = R1}, case length(R1) of @@ -1513,26 +1513,13 @@ add_restart([], _, _) -> []. inPeriod(Time, Now, Period) -> - case difference(Time, Now) of + case time_compat:convert_time_unit(Now - Time, native, seconds) of T when T > Period -> false; _ -> true end. -%% -%% Time = {MegaSecs, Secs, MicroSecs} (NOTE: MicroSecs is ignored) -%% Calculate the time elapsed in seconds between two timestamps. -%% If MegaSecs is equal just subtract Secs. -%% Else calculate the Mega difference and add the Secs difference, -%% note that Secs difference can be negative, e.g. -%% {827, 999999, 676} diff {828, 1, 653753} == > 2 secs. 
-%% -difference({TimeM, TimeS, _}, {CurM, CurS, _}) when CurM > TimeM -> - ((CurM - TimeM) * 1000000) + (CurS - TimeS); -difference({_, TimeS, _}, {_, CurS, _}) -> - CurS - TimeS. - %%% ------------------------------------------------------ %%% Error and progress reporting. %%% ------------------------------------------------------ diff --git a/rabbitmq-server/src/time_compat.erl b/rabbitmq-server/deps/rabbit_common/src/time_compat.erl similarity index 100% rename from rabbitmq-server/src/time_compat.erl rename to rabbitmq-server/deps/rabbit_common/src/time_compat.erl diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/CONTRIBUTING.md b/rabbitmq-server/deps/rabbitmq_amqp1_0/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/eldap-wrapper/CONTRIBUTING.md rename to rabbitmq-server/deps/rabbitmq_amqp1_0/CONTRIBUTING.md diff --git a/rabbitmq-server/deps/rabbitmq_amqp1_0/Makefile b/rabbitmq-server/deps/rabbitmq_amqp1_0/Makefile new file mode 100644 index 0000000..e9e612b --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/Makefile @@ -0,0 +1,61 @@ +PROJECT = rabbitmq_amqp1_0 + +DEPS = amqp_client + +TEST_DEPS = rabbit rabbitmq_java_client + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +EXTRA_SOURCES += include/rabbit_amqp1_0_framing.hrl \ + src/rabbit_amqp1_0_framing0.erl + +.DEFAULT_GOAL = all +$(PROJECT).d:: $(EXTRA_SOURCES) + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk + +# -------------------------------------------------------------------- +# Framing sources generation. 
+# -------------------------------------------------------------------- + +PYTHON ?= python +CODEGEN = $(CURDIR)/codegen.py +CODEGEN_DIR ?= $(DEPS_DIR)/rabbitmq_codegen +CODEGEN_AMQP = $(CODEGEN_DIR)/amqp_codegen.py +CODEGEN_SPECS = spec/messaging.xml spec/security.xml spec/transport.xml \ + spec/transactions.xml + +include/rabbit_amqp1_0_framing.hrl:: $(CODEGEN) $(CODEGEN_AMQP) \ + $(CODEGEN_SPECS) + $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \ + $(PYTHON) $(CODEGEN) hrl $(CODEGEN_SPECS) > $@ + +src/rabbit_amqp1_0_framing0.erl:: $(CODEGEN) $(CODEGEN_AMQP) \ + $(CODEGEN_SPECS) + $(gen_verbose) env PYTHONPATH=$(CODEGEN_DIR) \ + $(PYTHON) $(CODEGEN) erl $(CODEGEN_SPECS) > $@ + +clean:: clean-extra-sources + +clean-extra-sources: + $(gen_verbose) rm -f $(EXTRA_SOURCES) + +# -------------------------------------------------------------------- +# Testing. +# -------------------------------------------------------------------- + +WITH_BROKER_TEST_SCRIPTS := $(CURDIR)/test/swiftmq/run-tests.sh + +STANDALONE_TEST_COMMANDS := eunit:test(rabbit_amqp1_0_test,[verbose]) + +distclean:: distclean-swiftmq + +distclean-swiftmq: + $(gen_verbose) $(MAKE) -C test/swiftmq clean diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/README.md b/rabbitmq-server/deps/rabbitmq_amqp1_0/README.md similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/README.md rename to rabbitmq-server/deps/rabbitmq_amqp1_0/README.md diff --git a/rabbitmq-server/deps/rabbitmq_amqp1_0/build.config b/rabbitmq-server/deps/rabbitmq_amqp1_0/build.config new file mode 100644 index 0000000..0855303 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/build.config @@ -0,0 +1,43 @@ +# Do *not* comment or remove core modules +# unless you know what you are doing. +# +# Feel free to comment plugins out however. + +# Core modules. +core/core +index/* +core/index +core/deps + +# Plugins that must run before Erlang code gets compiled. 
+plugins/erlydtl +plugins/protobuffs + +# Core modules, continued. +core/erlc +core/docs +core/rel +core/test +core/compat + +# Plugins. +plugins/asciidoc +plugins/bootstrap +plugins/c_src +plugins/ci +plugins/ct +plugins/dialyzer +plugins/edoc +plugins/elvis +plugins/escript +# plugins/eunit +plugins/relx +plugins/shell +plugins/triq +plugins/xref + +# Plugins enhancing the functionality of other plugins. +plugins/cover + +# Core modules which can use variables from plugins. +core/deps-tools diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/codegen.py b/rabbitmq-server/deps/rabbitmq_amqp1_0/codegen.py similarity index 72% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/codegen.py rename to rabbitmq-server/deps/rabbitmq_amqp1_0/codegen.py index 145cfe5..7dbcfef 100755 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/codegen.py +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/codegen.py @@ -1,4 +1,7 @@ #!/usr/bin/env python + +from __future__ import print_function + import sys import os import re @@ -37,50 +40,50 @@ class AMQPDefines: dom.getElementsByTagName('choice')] def print_erl(types): - print """-module(rabbit_amqp1_0_framing0). + print("""-module(rabbit_amqp1_0_framing0). -export([record_for/1, fields/1, encode/1, symbol_for/1, number_for/1]). --include("rabbit_amqp1_0.hrl").""" +-include("rabbit_amqp1_0.hrl").""") for t in types: - print """record_for({symbol, <<"%s">>}) -> - #'v1_0.%s'{};""" % (t.desc, t.name) + print("""record_for({symbol, <<"%s">>}) -> + #'v1_0.%s'{};""" % (t.desc, t.name)) if t.code: - print """record_for({_, %d}) -> - #'v1_0.%s'{};""" % (t.number, t.name) - print "%% %s\n" % t.code + print("""record_for({_, %d}) -> + #'v1_0.%s'{};""" % (t.number, t.name)) + print("%% %s\n" % t.code) - print """record_for(Other) -> exit({unknown, Other}). + print("""record_for(Other) -> exit({unknown, Other}). 
-""" +""") for t in types: - print """fields(#'v1_0.%s'{}) -> record_info(fields, 'v1_0.%s');""" % (t.name, t.name) - print """fields(_Other) -> unknown. + print("""fields(#'v1_0.%s'{}) -> record_info(fields, 'v1_0.%s');""" % (t.name, t.name)) + print("""fields(_Other) -> unknown. -""" +""") for t in types: - print """encode(Frame = #'v1_0.%s'{}) -> - rabbit_amqp1_0_framing:encode_described('%s', %s, Frame);""" % (t.name, t.source, t.number) - print """encode(undefined) -> null; + print("""encode(Frame = #'v1_0.%s'{}) -> + rabbit_amqp1_0_framing:encode_described('%s', %s, Frame);""" % (t.name, t.source, t.number)) + print("""encode(undefined) -> null; encode(Other) -> Other. -""" +""") for t in types: - print """symbol_for(#'v1_0.%s'{}) -> - {symbol, <<"%s">>};""" % (t.name, t.desc) - print """symbol_for(Other) -> exit({unknown, Other}). + print("""symbol_for(#'v1_0.%s'{}) -> + {symbol, <<"%s">>};""" % (t.name, t.desc)) + print("""symbol_for(Other) -> exit({unknown, Other}). -""" +""") for t in types: - print """number_for(#'v1_0.%s'{}) -> - {ulong, %s};""" % (t.name, t.number) - print """number_for(Other) -> exit({unknown, Other}).""" + print("""number_for(#'v1_0.%s'{}) -> + {ulong, %s};""" % (t.name, t.number)) + print("""number_for(Other) -> exit({unknown, Other}).""") def print_hrl(types, defines): for t in types: - print """-record('v1_0.%s', {%s}).""" % (t.name, ", ".join(t.fields)) + print("""-record('v1_0.%s', {%s}).""" % (t.name, ", ".join(t.fields))) print_define(t.define(), 'symbol') for d in defines: if len(d.options) > 0: - print """ %% %s""" % (d.name) + print(""" %% %s""" % (d.name)) for opt in d.options: print_define(opt, d.source) @@ -90,7 +93,7 @@ def print_define(opt, source): quoted = '<<"%s">>' % value else: quoted = value - print """-define(V_1_0_%s, {%s, %s}).""" % (name, source, quoted) + print("""-define(V_1_0_%s, {%s, %s}).""" % (name, source, quoted)) def want_type(el): descriptors = el.getElementsByTagName('descriptor') diff --git 
a/rabbitmq-server/deps/rabbitmq_amqp1_0/erlang.mk b/rabbitmq-server/deps/rabbitmq_amqp1_0/erlang.mk new file mode 100644 index 0000000..9f0c0c3 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/erlang.mk @@ -0,0 +1,6589 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c-dirty + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. 
+ +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... 
Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). 
+endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. 
+ +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch 
= git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. 
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += 
bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simply writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab protocol implemented in Erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = master + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. 
+ +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. + +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. 
+ +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. +endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' > $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. 
+ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. +EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/include/rabbit_amqp1_0.hrl b/rabbitmq-server/deps/rabbitmq_amqp1_0/include/rabbit_amqp1_0.hrl similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/include/rabbit_amqp1_0.hrl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/include/rabbit_amqp1_0.hrl diff --git a/rabbitmq-server/deps/rabbitmq_amqp1_0/rabbitmq-components.mk b/rabbitmq-server/deps/rabbitmq_amqp1_0/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. +ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. 
If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. + +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq 
rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq 
rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples 
\ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. +NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. 
If this is not a Git clone, +# default to the upstream Git repository. +ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. 
+dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. 
That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. +# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) 
+ +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . +ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/messaging.xml b/rabbitmq-server/deps/rabbitmq_amqp1_0/spec/messaging.xml similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/messaging.xml rename to rabbitmq-server/deps/rabbitmq_amqp1_0/spec/messaging.xml diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/security.xml b/rabbitmq-server/deps/rabbitmq_amqp1_0/spec/security.xml similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/security.xml rename to rabbitmq-server/deps/rabbitmq_amqp1_0/spec/security.xml diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/transactions.xml b/rabbitmq-server/deps/rabbitmq_amqp1_0/spec/transactions.xml similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/transactions.xml rename to rabbitmq-server/deps/rabbitmq_amqp1_0/spec/transactions.xml diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/transport.xml b/rabbitmq-server/deps/rabbitmq_amqp1_0/spec/transport.xml similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/transport.xml rename to rabbitmq-server/deps/rabbitmq_amqp1_0/spec/transport.xml diff --git 
a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/types.xml b/rabbitmq-server/deps/rabbitmq_amqp1_0/spec/types.xml similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/spec/types.xml rename to rabbitmq-server/deps/rabbitmq_amqp1_0/spec/types.xml diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_generator.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_generator.erl similarity index 81% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_generator.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_generator.erl index 2277cb3..6f089df 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_generator.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_generator.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_binary_generator). @@ -88,9 +88,14 @@ generate({list, List}) -> Count = length(List), Compound = lists:map(fun generate/1, List), S = iolist_size(Compound), - %% S < 256 -> Count < 256 - if S > 255 -> [<<16#d0, (S + 4):32/unsigned, Count:32/unsigned>>, Compound]; - true -> [<<16#c0, (S + 1):8/unsigned, Count:8/unsigned>>, Compound] + %% If the list contains less than (256 - 1) elements and if the + %% encoded size (including the encoding of "Count", thus S + 1 + %% in the test) is less than 256 bytes, we use the short form. + %% Otherwise, we use the large form. 
+ if Count >= (256 - 1) orelse (S + 1) >= 256 -> + [<<16#d0, (S + 4):32/unsigned, Count:32/unsigned>>, Compound]; + true -> + [<<16#c0, (S + 1):8/unsigned, Count:8/unsigned>>, Compound] end; generate({map, ListOfPairs}) -> @@ -100,8 +105,11 @@ generate({map, ListOfPairs}) -> (generate(Val))] end, ListOfPairs), S = iolist_size(Compound), - if S > 255 -> [<<16#d1,(S + 4):32,Count:32>>, Compound]; - true -> [<<16#c1,(S + 1):8,Count:8>>, Compound] + %% See generate({list, ...}) for an explanation of this test. + if Count >= (256 - 1) orelse (S + 1) >= 256 -> + [<<16#d1, (S + 4):32, Count:32>>, Compound]; + true -> + [<<16#c1, (S + 1):8, Count:8>>, Compound] end; generate({array, Type, List}) -> @@ -109,9 +117,11 @@ generate({array, Type, List}) -> Body = iolist_to_binary( [constructor(Type), [generate(Type, I) || I <- List]]), S = size(Body), - %% S < 256 -> Count < 256 - if S > 255 -> [<<16#f0, (S + 4):32/unsigned, Count:32/unsigned>>, Body]; - true -> [<<16#e0, (S + 1):8/unsigned, Count:8/unsigned>>, Body] + %% See generate({list, ...}) for an explanation of this test. + if Count >= (256 - 1) orelse (S + 1) >= 256 -> + [<<16#f0, (S + 4):32/unsigned, Count:32/unsigned>>, Body]; + true -> + [<<16#e0, (S + 1):8/unsigned, Count:8/unsigned>>, Body] end; generate({as_is, TypeCode, Bin}) -> diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_parser.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_parser.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_parser.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_parser.erl index 677ebaa..95c8077 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_binary_parser.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_binary_parser.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. 
-%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_binary_parser). diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_channel.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_channel.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl index 1a96803..2e5a4b2 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_channel.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_channel.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_channel). diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_framing.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_framing.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_framing.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_framing.erl index a4b1b60..0ff6b43 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_framing.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_framing.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_framing). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_incoming_link.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_incoming_link.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl index ab5d331..46748b1 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_incoming_link.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_incoming_link.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_incoming_link). @@ -193,6 +193,8 @@ ensure_target(Target = #'v1_0.target'{address = Address, timeout = _Timeout}, Link = #incoming_link{ route_state = RouteState }, DCh) -> DeclareParams = [{durable, rabbit_amqp1_0_link_util:durable(Durable)}, + {exclusive, false}, + {auto_delete, false}, {check_exchange, true}, {nowait, false}], case Dynamic of diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_link_util.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_link_util.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl index 2c34fd2..5b423ff 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_link_util.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_link_util.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. 
All rights reserved. %% -module(rabbit_amqp1_0_link_util). diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_message.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_message.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl index c48e9c5..19d5fc0 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_message.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_message). diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_outgoing_link.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_outgoing_link.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl index c418ba4..f0366c8 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_outgoing_link.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_outgoing_link.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_outgoing_link). 
@@ -157,6 +157,8 @@ ensure_source(Source = #'v1_0.source'{address = Address, timeout = _Timeout}, Link = #outgoing_link{ route_state = RouteState }, DCh) -> DeclareParams = [{durable, rabbit_amqp1_0_link_util:durable(Durable)}, + {exclusive, false}, + {auto_delete, false}, {check_exchange, true}, {nowait, false}], case Dynamic of diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_reader.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_reader.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl index 06664ff..dd46010 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_reader.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_reader.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_reader). diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl index 3f507be..5a3ae02 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_session). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_process.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_process.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl index 2f9be46..68cc49c 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_process.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_process.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_session_process). diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl index bdc8fa9..7cf7900 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_session_sup). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup_sup.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl similarity index 95% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup_sup.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl index dca7312..1e1eaa5 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_session_sup_sup.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_session_sup_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_session_sup_sup). diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_util.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_util.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl index 4a79502..1e608db 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_util.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_util.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_util). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_writer.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_writer.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl index 399e4e4..e70f728 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbit_amqp1_0_writer.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_writer.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_writer). diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbitmq_amqp1_0.app.src b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbitmq_amqp1_0.app.src similarity index 93% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbitmq_amqp1_0.app.src rename to rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbitmq_amqp1_0.app.src index da69097..63c98b0 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/src/rabbitmq_amqp1_0.app.src +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/src/rabbitmq_amqp1_0.app.src @@ -1,6 +1,6 @@ {application, rabbitmq_amqp1_0, [{description, "AMQP 1.0 support for RabbitMQ"}, - {vsn, "%%VSN%%"}, + {vsn, "3.6.1"}, {modules, []}, {registered, []}, {env, [{default_user, "guest"}, diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/lib-java/junit.jar b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/lib-java/junit.jar similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/lib-java/junit.jar rename to rabbitmq-server/deps/rabbitmq_amqp1_0/test/lib-java/junit.jar diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/Makefile 
b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/proton/Makefile similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/Makefile rename to rabbitmq-server/deps/rabbitmq_amqp1_0/test/proton/Makefile diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/build.xml b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/proton/build.xml similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/build.xml rename to rabbitmq-server/deps/rabbitmq_amqp1_0/test/proton/build.xml diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/test/com/rabbitmq/amqp1_0/tests/proton/ProtonTests.java b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/proton/test/com/rabbitmq/amqp1_0/tests/proton/ProtonTests.java similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/proton/test/com/rabbitmq/amqp1_0/tests/proton/ProtonTests.java rename to rabbitmq-server/deps/rabbitmq_amqp1_0/test/proton/test/com/rabbitmq/amqp1_0/tests/proton/ProtonTests.java diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/src/rabbit_amqp1_0_test.erl b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/src/rabbit_amqp1_0_test.erl similarity index 95% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/src/rabbit_amqp1_0_test.erl rename to rabbitmq-server/deps/rabbitmq_amqp1_0/test/src/rabbit_amqp1_0_test.erl index 2be29a2..211a6d5 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/src/rabbit_amqp1_0_test.erl +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/src/rabbit_amqp1_0_test.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_amqp1_0_test). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/Makefile b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/Makefile similarity index 78% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/Makefile rename to rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/Makefile index 3a1c639..3963a3d 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/Makefile +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/Makefile @@ -4,7 +4,7 @@ CLIENT_PKG=$(CLIENT_DIR).tar.gz .PHONY: test test: build/lib - ant test + $(ANT) test build/lib: $(CLIENT_PKG) mkdir -p build/tmp @@ -13,8 +13,8 @@ build/lib: $(CLIENT_PKG) mv build/tmp/$(CLIENT_DIR)/jars/*.jar build/lib rm -rf build/tmp cp ../lib-java/*.jar build/lib - (cd ../../../rabbitmq-java-client && ant dist) - cp ../../../rabbitmq-java-client/build/dist/rabbitmq-client.jar build/lib + (cd $(DEPS_DIR)/rabbitmq_java_client && ant dist) + cp $(DEPS_DIR)/rabbitmq_java_client/build/dist/rabbitmq-client.jar build/lib $(CLIENT_PKG): @echo diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/build.xml b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/build.xml similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/build.xml rename to rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/build.xml diff --git a/rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/run-tests.sh b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/run-tests.sh new file mode 100755 index 0000000..b055576 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/run-tests.sh @@ -0,0 +1,2 @@ +#!/bin/sh -e +${MAKE:-make} -C $(dirname $0) test diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java similarity index 90% rename from 
rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java rename to rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java index 2db131f..a44fb48 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java +++ b/rabbitmq-server/deps/rabbitmq_amqp1_0/test/swiftmq/test/com/rabbitmq/amqp1_0/tests/swiftmq/SwiftMQTests.java @@ -24,6 +24,7 @@ public class SwiftMQTests extends TestCase { private static final int OUTBOUND_WINDOW = 100; private static final int CONSUMER_LINK_CREDIT = 200; private static final String QUEUE = "/queue/test"; + private static final int RECEIVE_TIMEOUT = 10000; // 10 seconds timeout. private AMQPMessage msg() { AMQPMessage m = new AMQPMessage(); @@ -45,7 +46,7 @@ public class SwiftMQTests extends TestCase { p.send(msg()); p.close(); // Settlement happens here Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null); - AMQPMessage m = c.receive(); + AMQPMessage m = c.receive(RECEIVE_TIMEOUT); m.accept(); assertEquals(1, m.getData().size()); assertEquals(data(), m.getData().get(0)); @@ -75,7 +76,7 @@ public class SwiftMQTests extends TestCase { p.close(); Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null); - AMQPMessage m = c.receive(); + AMQPMessage m = c.receive(RECEIVE_TIMEOUT); m.accept(); c.close(); assertEquals(PayloadSize, m.getData().get(0).getValue().length); @@ -139,7 +140,7 @@ public class SwiftMQTests extends TestCase { p.send(msg); p.close(); Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null); - AMQPMessage recvMsg = c.receive(); + AMQPMessage recvMsg = c.receive(RECEIVE_TIMEOUT); recvMsg.accept(); assertEquals(val.getValue().getValueString(), recvMsg.getAmqpValue().getValue().getValueString()); @@ -157,7 +158,7 @@ public class SwiftMQTests 
extends TestCase { p.close(); Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_MOST_ONCE, false, null); - AMQPMessage m = c.receive(); + AMQPMessage m = c.receive(RECEIVE_TIMEOUT); assertTrue(m.isSettled()); s.close(); @@ -178,7 +179,7 @@ public class SwiftMQTests extends TestCase { p.close(); Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null); - AMQPMessage m = c.receive(); + AMQPMessage m = c.receive(RECEIVE_TIMEOUT); m.reject(); assertNull(get(c)); conn.close(); @@ -195,14 +196,14 @@ public class SwiftMQTests extends TestCase { p.close(); Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null); - AMQPMessage m1 = c.receive(); + AMQPMessage m1 = c.receive(RECEIVE_TIMEOUT); assertTrue(m1.getHeader().getFirstAcquirer().getValue()); assertFalse(m1.isSettled()); s.close(); s = conn.createSession(INBOUND_WINDOW, OUTBOUND_WINDOW); c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null); - AMQPMessage m2 = c.receive(); + AMQPMessage m2 = c.receive(RECEIVE_TIMEOUT); m2.accept(); assertTrue(compareMessageData(m1, m2)); @@ -225,10 +226,28 @@ public class SwiftMQTests extends TestCase { route("/exchange/amq.direct/", "/exchange/amq.direct", "", true); route("/exchange/amq.direct/a", "/exchange/amq.direct", "a", true); + /* The following three tests rely on the queue "test" created by + * previous tests in this function. */ route("/amq/queue/test", QUEUE, "", true); route(QUEUE, "/amq/queue/test", "", true); route("/amq/queue/test", "/amq/queue/test", "", true); + /* The following tests verify that a queue created out-of-band in AMQP + * is reachable from the AMQP 1.0 world. 
*/ + ConnectionFactory factory = new ConnectionFactory(); + com.rabbitmq.client.Connection connection = factory.newConnection(); + Channel channel = connection.createChannel(); + channel.queueDeclare("transient_q", false, false, false, null); + route("/amq/queue/transient_q", "/amq/queue/transient_q", "", true); + channel.queueDelete("transient_q"); + channel.queueDeclare("durable_q", true, false, false, null); + route("/amq/queue/durable_q", "/amq/queue/durable_q", "", true); + channel.queueDelete("durable_q"); + channel.queueDeclare("autodel_q", false, false, true, null); + route("/amq/queue/autodel_q", "/amq/queue/autodel_q", "", true); + channel.queueDelete("autodel_q"); + connection.close(); + route("/exchange/amq.direct/b", "/exchange/amq.direct", "a", false); route(QUEUE, "/exchange/amq.fanout", "", false); route(QUEUE, "/exchange/amq.headers", "", false); @@ -242,7 +261,7 @@ public class SwiftMQTests extends TestCase { channel.queueDeclare("transient", false, false, false, null); connection.close(); - for (String dest : Arrays.asList("/exchange/missing", "/queue/transient", "/fruit/orange")) { + for (String dest : Arrays.asList("/exchange/missing", "/fruit/orange")) { routeInvalidSource(dest); routeInvalidTarget(dest); } @@ -285,7 +304,7 @@ public class SwiftMQTests extends TestCase { p.send(msg); if (succeed) { - AMQPMessage m = c.receive(); + AMQPMessage m = c.receive(RECEIVE_TIMEOUT); assertNotNull(m); assertEquals(sentinel.getValue().getValueString(), m.getAmqpValue().getValue().getValueString()); m.accept(); @@ -355,7 +374,7 @@ public class SwiftMQTests extends TestCase { p.send(msg); p.close(); Consumer c = s.createConsumer(QUEUE, CONSUMER_LINK_CREDIT, QoS.AT_LEAST_ONCE, false, null); - AMQPMessage recvMsg = c.receive(); + AMQPMessage recvMsg = c.receive(RECEIVE_TIMEOUT); recvMsg.accept(); compareMaps(map, d.getDecoration(recvMsg)); diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/CONTRIBUTING.md 
b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/mochiweb-wrapper/CONTRIBUTING.md rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/CONTRIBUTING.md diff --git a/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/Makefile b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/Makefile new file mode 100644 index 0000000..25e042a --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/Makefile @@ -0,0 +1,29 @@ +PROJECT = rabbitmq_auth_backend_ldap + +DEPS = amqp_client + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk + +# -------------------------------------------------------------------- +# Testing. +# -------------------------------------------------------------------- + +ifneq ($(filter tests tests-with-broker test,$(MAKECMDGOALS)),) +ifeq ($(shell nc -z localhost 389 && echo true),true) +WITH_BROKER_TEST_MAKEVARS := \ + RABBITMQ_CONFIG_FILE=$(CURDIR)/etc/rabbit-test +WITH_BROKER_TEST_COMMANDS := \ + eunit:test([rabbit_auth_backend_ldap_unit_test,rabbit_auth_backend_ldap_test],[verbose]) +else +$(info Skipping LDAP tests; no LDAP server found on localhost) +endif +endif diff --git a/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README-authorisation.md b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README-authorisation.md new file mode 100644 index 0000000..303825f --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README-authorisation.md @@ -0,0 +1 @@ +See [RabbitMQ LDAP authentication backend docs](http://www.rabbitmq.com/ldap.html). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-tests b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README-tests.md similarity index 53% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-tests rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README-tests.md index eac53c7..423adda 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-tests +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README-tests.md @@ -1,13 +1,16 @@ +# Running LDAP Backend Tests + The tests *require* a locally installed LDAP server with some predefined objects inside. If there's no LDAP server running on port 389, they will be skipped. -On Debian / Ubuntu you can just: +On a Debian-based distro you can set up a LDAP server +and run the tests with: -$ ./example/setup.sh -$ make test + ./example/setup.sh + make tests - - but be aware that this will wipe out your local OpenLDAP installation. +but be aware that this will wipe out your local OpenLDAP installation. Poke around in example/ if using any other distro, you can probably make it work. diff --git a/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README.md b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README.md new file mode 100644 index 0000000..69fea5c --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/README.md @@ -0,0 +1,19 @@ +# RabbitMQ LDAP Authentication Backend + +This plugin provides [authentication and authorisation backends](http://rabbitmq.com/access-control.html) +for RabbitMQ that use LDAP. + +## Requirements + +You can build and install it like any other plugin (see +http://www.rabbitmq.com/plugin-development.html). + +## Documentation + +[See LDAP guide](http://www.rabbitmq.com/ldap.html) on rabbitmq.com. + +## Limitations + +Prior to RabbitMQ 3.6.0, this plugin opened a new LDAP server +connection for every operation. 3.6.0 and later versions use +a pool of connections. 
diff --git a/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/build.config b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/build.config new file mode 100644 index 0000000..0855303 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/build.config @@ -0,0 +1,43 @@ +# Do *not* comment or remove core modules +# unless you know what you are doing. +# +# Feel free to comment plugins out however. + +# Core modules. +core/core +index/* +core/index +core/deps + +# Plugins that must run before Erlang code gets compiled. +plugins/erlydtl +plugins/protobuffs + +# Core modules, continued. +core/erlc +core/docs +core/rel +core/test +core/compat + +# Plugins. +plugins/asciidoc +plugins/bootstrap +plugins/c_src +plugins/ci +plugins/ct +plugins/dialyzer +plugins/edoc +plugins/elvis +plugins/escript +# plugins/eunit +plugins/relx +plugins/shell +plugins/triq +plugins/xref + +# Plugins enhancing the functionality of other plugins. +plugins/cover + +# Core modules which can use variables from plugins. +core/deps-tools diff --git a/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/erlang.mk b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/erlang.mk new file mode 100644 index 0000000..9f0c0c3 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/erlang.mk @@ -0,0 +1,6589 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c-dirty + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. + +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. 
+rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. 
+define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). +endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. 
+ +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. + +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative 
Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch = git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += 
backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. +pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear 
+pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour D NS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simplify writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = A embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. 
+ +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. + +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. 
+ +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. +endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' > $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. 
+ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. +EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/etc/rabbit-test.config b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/etc/rabbit-test.config similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/etc/rabbit-test.config rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/etc/rabbit-test.config diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/README b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/README similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/README rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/README diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/global.ldif b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/global.ldif similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/global.ldif rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/global.ldif diff --git a/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/groups.ldif b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/groups.ldif new file mode 100644 index 0000000..82402be --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/groups.ldif @@ -0,0 +1,16 @@ +dn: ou=groups,dc=example,dc=com +objectclass:organizationalunit +ou: groups + +dn: cn=wheel,ou=groups,dc=example,dc=com 
+objectclass: groupOfNames +cn: wheel +member: cn=Simon MacMullen,ou=people,dc=example,dc=com +member: cn=Jean-Sebastien Pedron,ou=people,dc=example,dc=com +member: cn=Michael Klishin,ou=people,dc=example,dc=com + +dn: cn=people,ou=groups,dc=example,dc=com +objectclass: groupOfNames +cn: people +member: cn=Jean-Sebastien Pedron,ou=people,dc=example,dc=com +member: cn=Michael Klishin,ou=people,dc=example,dc=com diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/people.ldif b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/people.ldif similarity index 56% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/people.ldif rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/people.ldif index 444879f..5ec0b8c 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/people.ldif +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/people.ldif @@ -22,3 +22,21 @@ objectClass: person cn: Mike Bridgen sn: Bridgen userPassword: password + +dn: cn=Michael Klishin,ou=people,dc=example,dc=com +objectClass: person +cn: Michael Klishin +sn: Klishin +userPassword: password + +dn: cn=Jean-Sebastien Pedron,ou=people,dc=example,dc=com +objectClass: person +cn: Jean-Sebastien Pedron +sn: Pedron +userPassword: password + +dn: cn=John Doe,ou=people,dc=example,dc=com +objectClass: person +cn: John Doe +sn: Doe +userPassword: password diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/rabbit.ldif b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/rabbit.ldif similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/rabbit.ldif rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/rabbit.ldif diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/setup.sh b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/seed.sh similarity index 53% rename from 
rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/setup.sh rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/seed.sh index bca4dcb..cf7611d 100755 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/setup.sh +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/seed.sh @@ -1,14 +1,5 @@ #!/bin/sh -e -# Based on instructions found at -# http://ubuntuforums.org/showthread.php?p=8161118#post8161118 -# - yes that does seem to be the most authoritative place. - -sudo apt-get --yes purge slapd -sudo rm -rf /var/lib/ldap -sudo apt-get --yes install slapd ldap-utils -sleep 1 - DIR=$(dirname $0) sudo ldapadd -Y EXTERNAL -H ldapi:/// -f ${DIR}/global.ldif diff --git a/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/setup.sh b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/setup.sh new file mode 100755 index 0000000..5c227f2 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/example/setup.sh @@ -0,0 +1,10 @@ +#!/bin/sh -e + +sudo apt-get --yes purge slapd +sudo rm -rf /var/lib/ldap +sudo apt-get --yes install slapd ldap-utils +sleep 1 + +DIR=$(dirname $0) + +./$DIR/seed.sh diff --git a/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. 
+ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. + +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq 
rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq 
rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. 
The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples \ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. 
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. If this is not a Git clone, +# default to the upstream Git repository. 
+ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. +dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git 
checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. 
+# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) + +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . 
+ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap.erl b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl similarity index 90% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap.erl rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl index 7a7e91f..3487173 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap.erl +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_auth_backend_ldap). @@ -246,8 +246,10 @@ with_ldap(_Creds, _Fun, undefined) -> with_ldap({error, _} = E, _Fun, _State) -> E; -%% TODO - ATM we create and destroy a new LDAP connection on every -%% call. This could almost certainly be more efficient. + +%% TODO - while we now pool LDAP connections we don't make any attempt +%% to avoid rebinding if the connection is already bound as the user +%% of interest, so this could still be more efficient. 
with_ldap({ok, Creds}, Fun, Servers) -> Opts0 = [{port, env(port)}], Opts1 = case env(log) of @@ -266,9 +268,23 @@ with_ldap({ok, Creds}, Fun, Servers) -> infinity -> Opts1; MS -> [{timeout, MS} | Opts1] end, - case eldap_open(Servers, Opts) of + worker_pool:submit( + ldap_pool, + fun () -> + case with_login(Creds, Servers, Opts, Fun) of + {error, {gen_tcp_error, closed}} -> + %% retry with new connection + ?L1("server closed connection", []), + purge_conn(Creds == anon, Servers, Opts), + with_login(Creds, Servers, Opts, Fun); + Result -> Result + end + end, reuse). + +with_login(Creds, Servers, Opts, Fun) -> + case get_or_create_conn(Creds == anon, Servers, Opts) of {ok, LDAP} -> - try Creds of + case Creds of anon -> ?L1("anonymous bind", []), Fun(LDAP); @@ -285,14 +301,36 @@ with_ldap({ok, Creds}, Fun, Servers) -> ?L1("bind error: ~s ~p", [UserDN, E]), {error, E} end - after - eldap:close(LDAP) end; Error -> ?L1("connect error: ~p", [Error]), Error end. +%% Gets either the anonymous or bound (authenticated) connection +get_or_create_conn(IsAnon, Servers, Opts) -> + Conns = case get(ldap_conns) of + undefined -> dict:new(); + Dict -> Dict + end, + Key = {IsAnon, Servers, Opts}, + case dict:find(Key, Conns) of + {ok, Conn} -> Conn; + error -> + case eldap_open(Servers, Opts) of + {ok, _} = Conn -> put(ldap_conns, dict:store(Key, Conn, Conns)), Conn; + Error -> Error + end + end. + +purge_conn(IsAnon, Servers, Opts) -> + Conns = get(ldap_conns), + Key = {IsAnon, Servers, Opts}, + {_, {_, Conn}} = dict:find(Key, Conns), + ?L1("Purging dead server connection", []), + eldap:close(Conn), %% May already be closed + put(ldap_conns, dict:erase(Key, Conns)). 
+ eldap_open(Servers, Opts) -> case eldap:open(Servers, ssl_conf() ++ Opts) of {ok, LDAP} -> diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_app.erl b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl similarity index 78% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_app.erl rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl index e0cd7aa..e96a377 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_app.erl +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_app.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_auth_backend_ldap_app). @@ -21,7 +21,16 @@ %% Dummy supervisor to get this application behaviour working -behaviour(supervisor). --export([init/1]). +-export([create_ldap_pool/0, init/1]). + +-rabbit_boot_step({ldap_pool, + [{description, "LDAP pool"}, + {mfa, {?MODULE, create_ldap_pool, []}}, + {requires, kernel_ready}]}). + +create_ldap_pool() -> + {ok, PoolSize} = application:get_env(rabbitmq_auth_backend_ldap, pool_size), + rabbit_sup:start_supervisor_child(ldap_pool_sup, worker_pool_sup, [PoolSize, ldap_pool]). start(_Type, _StartArgs) -> {ok, Backends} = application:get_env(rabbit, auth_backends), @@ -51,3 +60,4 @@ configured(M, [_ |T]) -> configured(M, T). %%---------------------------------------------------------------------------- init([]) -> {ok, {{one_for_one, 3, 10}, []}}. 
+ diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_util.erl b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl similarity index 94% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_util.erl rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl index 47c3d66..86216ac 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbit_auth_backend_ldap_util.erl +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbit_auth_backend_ldap_util.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_auth_backend_ldap_util). diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbitmq_auth_backend_ldap.app.src b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbitmq_auth_backend_ldap.app.src similarity index 89% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbitmq_auth_backend_ldap.app.src rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbitmq_auth_backend_ldap.app.src index b2139d6..e9f076b 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/src/rabbitmq_auth_backend_ldap.app.src +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/src/rabbitmq_auth_backend_ldap.app.src @@ -1,7 +1,7 @@ %% -*- erlang -*- {application, rabbitmq_auth_backend_ldap, [{description, "RabbitMQ LDAP Authentication Backend"}, - {vsn, "%%VSN%%"}, + {vsn, "3.6.1"}, {modules, []}, {registered, []}, {mod, {rabbit_auth_backend_ldap_app, []}}, @@ -19,5 +19,6 @@ {ssl_options, []}, {port, 389}, {timeout, infinity}, - {log, false} ] }, + {log, false}, + {pool_size, 10} ] }, {applications, [kernel, stdlib, 
eldap, rabbit]}]}. diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_test.erl b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/test/src/rabbit_auth_backend_ldap_test.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_test.erl rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/test/src/rabbit_auth_backend_ldap_test.erl index c340d68..cd29160 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_test.erl +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/test/src/rabbit_auth_backend_ldap_test.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_auth_backend_ldap_test). diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl similarity index 95% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl rename to rabbitmq-server/deps/rabbitmq_auth_backend_ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl index 47223f9..f0ed8d4 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl +++ b/rabbitmq-server/deps/rabbitmq_auth_backend_ldap/test/src/rabbit_auth_backend_ldap_unit_test.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_auth_backend_ldap_unit_test). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/CONTRIBUTING.md b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-amqp1.0/CONTRIBUTING.md rename to rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/CONTRIBUTING.md diff --git a/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/Makefile b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/Makefile new file mode 100644 index 0000000..b76a9e6 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/Makefile @@ -0,0 +1,14 @@ +PROJECT = rabbitmq_auth_mechanism_ssl + +DEPS = amqp_client + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/README b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/README.md similarity index 75% rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/README rename to rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/README.md index 4eb4f14..8d659ec 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/README +++ b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/README.md @@ -1,3 +1,5 @@ +# x509 (TLS/SSL) certificate Authentication Mechanism for RabbitMQ + Authenticates the user, obtaining the username from the client's SSL certificate. The user's password is not checked. @@ -5,15 +7,19 @@ In order to use this mechanism the client must connect over SSL, and present a client certificate. 
The mechanism must also be enabled in RabbitMQ's configuration file - -see http://www.rabbitmq.com/authentication.html for more details, or +see [TLS Authentication guide](http://www.rabbitmq.com/authentication.html) for more details, or in short, ensure that the 'rabbit' section of your configuration contains: +``` erlang {auth_mechanisms, ['PLAIN', 'AMQPLAIN', 'EXTERNAL']} +``` to allow this mechanism in addition to the defaults, or: +``` erlang {auth_mechanisms, ['EXTERNAL']} +``` to allow only this mechanism. @@ -27,18 +33,30 @@ produced by OpenSSL's "-nameopt RFC2253" option. You can obtain this string form from a certificate with a command like: -$ openssl x509 -in path/to/cert.pem -nameopt RFC2253 -subject -noout +``` +openssl x509 -in path/to/cert.pem -nameopt RFC2253 -subject -noout +``` or from an existing amqps connection with commands like: -$ rabbitmqctl list_connections peer_cert_subject +``` +rabbitmqctl list_connections peer_cert_subject +``` To use the Common Name instead, ensure that the 'rabbit' section of your configuration contains: +``` {ssl_cert_login_from, common_name} +``` Note that the authenticated user will then be looked up in the configured authentication / authorisation backend(s) - this will be the mnesia-based user database by default, but could include other backends if so configured. + +## Copyright & License + +(c) Pivotal Software Inc., 2007 — 2015. + +Released under the same license as RabbitMQ. diff --git a/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/erlang.mk b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/erlang.mk new file mode 100644 index 0000000..1688ee8 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/erlang.mk @@ -0,0 +1,6640 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. + +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. 
+rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. 
+define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). +endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. 
+ +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. + +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative 
Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch = git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += 
backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. +pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear 
+pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour D NS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simply writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UNP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = A embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2014, Enrique Fernandez +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: eunit + +# Configuration + +EUNIT_OPTS ?= + +# Core targets. + +tests:: eunit + +help:: + $(verbose) printf "%s\n" "" \ + "EUnit targets:" \ + " eunit Run all the EUnit tests for this project" + +# Plugin-specific targets. 
+ +define eunit.erl + case "$(COVER)" of + "" -> ok; + _ -> + case cover:compile_beam_directory("ebin") of + {error, _} -> halt(1); + _ -> ok + end + end, + case eunit:test([$(call comma_list,$(1))], [$(EUNIT_OPTS)]) of + ok -> ok; + error -> halt(2) + end, + case "$(COVER)" of + "" -> ok; + _ -> + cover:export("eunit.coverdata") + end, + halt() +endef + +EUNIT_EBIN_MODS = $(notdir $(basename $(call core_find,ebin/,*.beam))) +EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.beam))) +EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \ + $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),{module,'$(mod)'}) + +eunit: test-build + $(gen_verbose) $(ERL) -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin ebin \ + -eval "$(subst $(newline),,$(subst ",\",$(call eunit.erl,$(EUNIT_MODS))))" + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. + +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. 
+ +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. + +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. 
+endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' 
> $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. +ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. 
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. +ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. 
+ +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = 
git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = 
git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples \ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. 
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. If this is not a Git clone, +# default to the upstream Git repository. 
+ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. +dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git 
checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. 
+# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) + +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . 
+ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl.erl b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl.erl rename to rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl index 47cfcab..b7dac3f 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl.erl +++ b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl_app.erl b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl similarity index 94% rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl_app.erl rename to rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl index 7f6eff9..159cccc 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbit_auth_mechanism_ssl_app.erl +++ b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbit_auth_mechanism_ssl_app.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. 
%% -module(rabbit_auth_mechanism_ssl_app). diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbitmq_auth_mechanism_ssl.app.src b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbitmq_auth_mechanism_ssl.app.src similarity index 93% rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbitmq_auth_mechanism_ssl.app.src rename to rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbitmq_auth_mechanism_ssl.app.src index 400b3b0..f3ae501 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/src/rabbitmq_auth_mechanism_ssl.app.src +++ b/rabbitmq-server/deps/rabbitmq_auth_mechanism_ssl/src/rabbitmq_auth_mechanism_ssl.app.src @@ -1,7 +1,7 @@ %% -*- erlang -*- {application, rabbitmq_auth_mechanism_ssl, [{description, "RabbitMQ SSL authentication (SASL EXTERNAL)"}, - {vsn, "%%VSN%%"}, + {vsn, "3.6.1"}, {modules, []}, {registered, []}, {mod, {rabbit_auth_mechanism_ssl_app, []}}, diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/CONTRIBUTING.md b/rabbitmq-server/deps/rabbitmq_codegen/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/CONTRIBUTING.md rename to rabbitmq-server/deps/rabbitmq_codegen/CONTRIBUTING.md diff --git a/rabbitmq-server/codegen/LICENSE b/rabbitmq-server/deps/rabbitmq_codegen/LICENSE similarity index 100% rename from rabbitmq-server/codegen/LICENSE rename to rabbitmq-server/deps/rabbitmq_codegen/LICENSE diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-MPL-RabbitMQ b/rabbitmq-server/deps/rabbitmq_codegen/LICENSE-MPL-RabbitMQ similarity index 100% rename from rabbitmq-server/plugins-src/licensing/LICENSE-MPL-RabbitMQ rename to rabbitmq-server/deps/rabbitmq_codegen/LICENSE-MPL-RabbitMQ diff --git a/rabbitmq-server/codegen/Makefile b/rabbitmq-server/deps/rabbitmq_codegen/Makefile similarity index 64% rename from rabbitmq-server/codegen/Makefile rename to rabbitmq-server/deps/rabbitmq_codegen/Makefile index 
8e028c8..7b27ec7 100644 --- a/rabbitmq-server/codegen/Makefile +++ b/rabbitmq-server/deps/rabbitmq_codegen/Makefile @@ -1,5 +1,5 @@ all: - @echo "Please select a target from the Makefile." + @: clean: rm -f *.pyc diff --git a/rabbitmq-server/codegen/README.extensions.md b/rabbitmq-server/deps/rabbitmq_codegen/README.extensions.md similarity index 100% rename from rabbitmq-server/codegen/README.extensions.md rename to rabbitmq-server/deps/rabbitmq_codegen/README.extensions.md diff --git a/rabbitmq-server/codegen/amqp-rabbitmq-0.8.json b/rabbitmq-server/deps/rabbitmq_codegen/amqp-rabbitmq-0.8.json similarity index 99% rename from rabbitmq-server/codegen/amqp-rabbitmq-0.8.json rename to rabbitmq-server/deps/rabbitmq_codegen/amqp-rabbitmq-0.8.json index 35f8856..0950415 100644 --- a/rabbitmq-server/codegen/amqp-rabbitmq-0.8.json +++ b/rabbitmq-server/deps/rabbitmq_codegen/amqp-rabbitmq-0.8.json @@ -4,7 +4,7 @@ "minor-version": 0, "port": 5672, "copyright": [ - "Copyright (C) 2008-2013 GoPivotal, Inc.\n", + "Copyright (C) 2008-2016 Pivotal Software, Inc, Inc.\n", "\n", "Permission is hereby granted, free of charge, to any person\n", "obtaining a copy of this file (the \"Software\"), to deal in the\n", diff --git a/rabbitmq-server/codegen/amqp-rabbitmq-0.9.1.json b/rabbitmq-server/deps/rabbitmq_codegen/amqp-rabbitmq-0.9.1.json similarity index 99% rename from rabbitmq-server/codegen/amqp-rabbitmq-0.9.1.json rename to rabbitmq-server/deps/rabbitmq_codegen/amqp-rabbitmq-0.9.1.json index 0c3ee2a..2f4b10a 100644 --- a/rabbitmq-server/codegen/amqp-rabbitmq-0.9.1.json +++ b/rabbitmq-server/deps/rabbitmq_codegen/amqp-rabbitmq-0.9.1.json @@ -5,7 +5,7 @@ "revision": 1, "port": 5672, "copyright": [ - "Copyright (C) 2008-2013 GoPivotal, Inc.\n", + "Copyright (C) 2008-2016 Pivotal Software, Inc, Inc.\n", "\n", "Permission is hereby granted, free of charge, to any person\n", "obtaining a copy of this file (the \"Software\"), to deal in the\n", diff --git 
a/rabbitmq-server/codegen/amqp_codegen.py b/rabbitmq-server/deps/rabbitmq_codegen/amqp_codegen.py similarity index 93% rename from rabbitmq-server/codegen/amqp_codegen.py rename to rabbitmq-server/deps/rabbitmq_codegen/amqp_codegen.py index 2623a5d..843bcdf 100644 --- a/rabbitmq-server/codegen/amqp_codegen.py +++ b/rabbitmq-server/deps/rabbitmq_codegen/amqp_codegen.py @@ -23,7 +23,7 @@ from optparse import OptionParser try: try: import simplejson as json - except ImportError, e: + except ImportError as e: if sys.hexversion >= 0x20600f0: import json else: @@ -63,13 +63,13 @@ def extension_info_merger(key, acc, new, ignore_conflicts): def domains_merger(key, acc, new, ignore_conflicts): merged = dict((k, v) for [k, v] in acc) for [k, v] in new: - if merged.has_key(k): + if k in merged: if not ignore_conflicts: raise AmqpSpecFileMergeConflict(key, acc, new) else: merged[k] = v - return [[k, v] for (k, v) in merged.iteritems()] + return [[k, v] for (k, v) in merged.items()] def merge_dict_lists_by(dict_key, acc, new, ignore_conflicts): acc_index = set(v[dict_key] for v in acc) @@ -123,12 +123,12 @@ def merge_load_specs(filenames, ignore_conflicts): docs = [json.load(handle) for handle in handles] spec = {} for doc in docs: - for (key, value) in doc.iteritems(): + for (key, value) in doc.items(): (merger, default_value) = mergers.get(key, (default_spec_value_merger, None)) spec[key] = merger(key, spec.get(key, default_value), value, ignore_conflicts) for handle in handles: handle.close() return spec - + class AmqpSpec: # Slight wart: use a class member rather than change the ctor signature # to avoid breaking everyone else's code. 
@@ -139,7 +139,7 @@ class AmqpSpec: self.major = self.spec['major-version'] self.minor = self.spec['minor-version'] - self.revision = self.spec.has_key('revision') and self.spec['revision'] or 0 + self.revision = 'revision' in self.spec and self.spec['revision'] or 0 self.port = self.spec['port'] self.domains = {} @@ -149,7 +149,7 @@ class AmqpSpec: self.constants = [] for d in self.spec['constants']: - if d.has_key('class'): + if 'class' in d: klass = d['class'] else: klass = '' @@ -158,10 +158,10 @@ class AmqpSpec: self.classes = [] for element in self.spec['classes']: self.classes.append(AmqpClass(self, element)) - + def allClasses(self): return self.classes - + def allMethods(self): return [m for c in self.classes for m in c.allMethods()] @@ -172,7 +172,7 @@ class AmqpEntity: def __init__(self, element): self.element = element self.name = element['name'] - + class AmqpClass(AmqpEntity): def __init__(self, spec, element): AmqpEntity.__init__(self, element) @@ -190,12 +190,12 @@ class AmqpClass(AmqpEntity): break self.fields = [] - if self.element.has_key('properties'): + if 'properties' in self.element: index = 0 for e in self.element['properties']: self.fields.append(AmqpField(self, e, index)) index = index + 1 - + def allMethods(self): return self.methods @@ -207,11 +207,11 @@ class AmqpMethod(AmqpEntity): AmqpEntity.__init__(self, element) self.klass = klass self.index = int(self.element['id']) - if self.element.has_key('synchronous'): + if 'synchronous' in self.element: self.isSynchronous = self.element['synchronous'] else: self.isSynchronous = False - if self.element.has_key('content'): + if 'content' in self.element: self.hasContent = self.element['content'] else: self.hasContent = False @@ -221,7 +221,7 @@ class AmqpMethod(AmqpEntity): for argument in element['arguments']: self.arguments.append(AmqpField(self, argument, index)) index = index + 1 - + def __repr__(self): return 'AmqpMethod("' + self.klass.name + "." 
+ self.name + '" ' + repr(self.arguments) + ')' @@ -231,12 +231,12 @@ class AmqpField(AmqpEntity): self.method = method self.index = index - if self.element.has_key('type'): + if 'type' in self.element: self.domain = self.element['type'] else: self.domain = self.element['domain'] - - if self.element.has_key('default-value'): + + if 'default-value' in self.element: self.defaultvalue = self.element['default-value'] else: self.defaultvalue = None @@ -279,7 +279,7 @@ def do_main_dict(funcDict): sources = args[1:-1] dest = args[-1] AmqpSpec.ignore_conflicts = options.ignore_conflicts - if funcDict.has_key(function): + if function in funcDict: execute(funcDict[function], sources, dest) else: usage() diff --git a/rabbitmq-server/codegen/credit_extension.json b/rabbitmq-server/deps/rabbitmq_codegen/credit_extension.json similarity index 97% rename from rabbitmq-server/codegen/credit_extension.json rename to rabbitmq-server/deps/rabbitmq_codegen/credit_extension.json index b74391f..dd4805c 100644 --- a/rabbitmq-server/codegen/credit_extension.json +++ b/rabbitmq-server/deps/rabbitmq_codegen/credit_extension.json @@ -9,7 +9,7 @@ "hence you are strongly discouraged from building clients ", "which use it."], "copyright": [ - "Copyright (C) 2008-2013 GoPivotal, Inc.\n", + "Copyright (C) 2008-2016 Pivotal Software, Inc.\n", "\n", "Permission is hereby granted, free of charge, to any person\n", "obtaining a copy of this file (the \"Software\"), to deal in the\n", diff --git a/rabbitmq-server/codegen/demo_extension.json b/rabbitmq-server/deps/rabbitmq_codegen/demo_extension.json similarity index 100% rename from rabbitmq-server/codegen/demo_extension.json rename to rabbitmq-server/deps/rabbitmq_codegen/demo_extension.json diff --git a/rabbitmq-server/deps/rabbitmq_codegen/license_info b/rabbitmq-server/deps/rabbitmq_codegen/license_info new file mode 100644 index 0000000..a703cbd --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_codegen/license_info @@ -0,0 +1,4 @@ +The files 
amqp-rabbitmq-0.8.json and amqp-rabbitmq-0.9.1.json are +"Copyright (C) 2008-2016 Pivotal Software, Inc." and are covered by the MIT +license. + diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/CONTRIBUTING.md b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/CONTRIBUTING.md rename to rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/CONTRIBUTING.md diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/LICENSE b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/LICENSE similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/LICENSE rename to rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/LICENSE diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/LICENSE-MPL-RabbitMQ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/LICENSE-MPL-RabbitMQ similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/LICENSE-MPL-RabbitMQ rename to rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/LICENSE-MPL-RabbitMQ index 99428fe..9faaa4e 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/LICENSE-MPL-RabbitMQ +++ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/LICENSE-MPL-RabbitMQ @@ -447,7 +447,7 @@ EXHIBIT A -Mozilla Public License. The Original Code is RabbitMQ Consistent Hash Exchange. The Initial Developer of the Original Code is GoPivotal, Inc. - Copyright (c) 2011-2014 GoPivotal, Inc. All rights reserved.'' + Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved.'' [NOTE: The text of this Exhibit A may differ slightly from the text of the notices in the Source Code files of the Original Code. 
You should diff --git a/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/Makefile b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/Makefile new file mode 100644 index 0000000..cd80422 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/Makefile @@ -0,0 +1,21 @@ +PROJECT = rabbitmq_consistent_hash_exchange + +DEPS = amqp_client + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk + +# -------------------------------------------------------------------- +# Testing. +# -------------------------------------------------------------------- + +WITH_BROKER_TEST_COMMANDS := \ + rabbit_exchange_type_consistent_hash_test:test() diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/README.md b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/README.md similarity index 58% rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/README.md rename to rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/README.md index 2554d53..a19f9ee 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/README.md +++ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/README.md @@ -1,14 +1,17 @@ # RabbitMQ Consistent Hash Exchange Type +## What it Does + This plugin adds a consistent-hash exchange type to RabbitMQ. In various scenarios, you may wish to ensure that messages sent to an exchange are consistently and equally distributed across a number of -different queues based on the routing key of the message (or a -nominated header, see "Routing on a header" below). 
You could arrange -for this to occur yourself by using a direct or topic exchange, -binding queues to that exchange and then publishing messages to that -exchange that match the various binding keys. +different queues based on the routing key of the message, a nominated +header (see "Routing on a header" below), or a message property (see +"Routing on a message property" below). You could arrange for this to +occur yourself by using a direct or topic exchange, binding queues +to that exchange and then publishing messages to that exchange that +match the various binding keys. However, arranging things this way can be problematic: @@ -29,6 +32,8 @@ to the computed hash (and the hash space wraps around). The effect of this is that when a new bucket is added or an existing bucket removed, only a very few hashes change which bucket they are routed to. +## How It Works + In the case of Consistent Hashing as an exchange type, the hash is calculated from the hash of the routing key of each message received. Thus messages that have the same routing key will have the @@ -54,37 +59,48 @@ is processed. Hence in general, at most one queue. The exchange type is "x-consistent-hash". +## Supported RabbitMQ Versions + +This plugin supports RabbitMQ 3.3.x and later versions. + + +## Examples + +### Erlang + Here is an example using the Erlang client: - -include_lib("amqp_client/include/amqp_client.hrl"). +```erlang +-include_lib("amqp_client/include/amqp_client.hrl"). 
- test() -> - {ok, Conn} = amqp_connection:start(#amqp_params_network{}), - {ok, Chan} = amqp_connection:open_channel(Conn), - Queues = [<<"q0">>, <<"q1">>, <<"q2">>, <<"q3">>], - amqp_channel:call(Chan, - #'exchange.declare' { - exchange = <<"e">>, type = <<"x-consistent-hash">> - }), - [amqp_channel:call(Chan, #'queue.declare' { queue = Q }) || Q <- Queues], - [amqp_channel:call(Chan, #'queue.bind' { queue = Q, - exchange = <<"e">>, - routing_key = <<"10">> }) - || Q <- [<<"q0">>, <<"q1">>]], - [amqp_channel:call(Chan, #'queue.bind' { queue = Q, - exchange = <<"e">>, - routing_key = <<"20">> }) - || Q <- [<<"q2">>, <<"q3">>]], - Msg = #amqp_msg { props = #'P_basic'{}, payload = <<>> }, - [amqp_channel:call(Chan, - #'basic.publish'{ - exchange = <<"e">>, - routing_key = list_to_binary( - integer_to_list( - random:uniform(1000000))) - }, Msg) || _ <- lists:seq(1,100000)], - amqp_connection:close(Conn), - ok. +test() -> + {ok, Conn} = amqp_connection:start(#amqp_params_network{}), + {ok, Chan} = amqp_connection:open_channel(Conn), + Queues = [<<"q0">>, <<"q1">>, <<"q2">>, <<"q3">>], + amqp_channel:call(Chan, + #'exchange.declare' { + exchange = <<"e">>, type = <<"x-consistent-hash">> + }), + [amqp_channel:call(Chan, #'queue.declare' { queue = Q }) || Q <- Queues], + [amqp_channel:call(Chan, #'queue.bind' { queue = Q, + exchange = <<"e">>, + routing_key = <<"10">> }) + || Q <- [<<"q0">>, <<"q1">>]], + [amqp_channel:call(Chan, #'queue.bind' { queue = Q, + exchange = <<"e">>, + routing_key = <<"20">> }) + || Q <- [<<"q2">>, <<"q3">>]], + Msg = #amqp_msg { props = #'P_basic'{}, payload = <<>> }, + [amqp_channel:call(Chan, + #'basic.publish'{ + exchange = <<"e">>, + routing_key = list_to_binary( + integer_to_list( + random:uniform(1000000))) + }, Msg) || _ <- lists:seq(1,100000)], +amqp_connection:close(Conn), +ok. 
+``` As you can see, the queues `q0` and `q1` get bound each with 10 points in the hash space to the exchange `e` which means they'll each get @@ -122,16 +138,52 @@ exchange to route based on a named header instead. To do this, declare the exchange with a string argument called "hash-header" naming the header to be used. For example using the Erlang client as above: +```erlang amqp_channel:call( Chan, #'exchange.declare' { exchange = <<"e">>, type = <<"x-consistent-hash">>, arguments = [{<<"hash-header">>, longstr, <<"hash-me">>}] }). +``` If you specify "hash-header" and then publish messages without the named header, they will all get routed to the same (arbitrarily-chosen) queue. +## Routing on a message property + +In addition to a value in the header property, you can also route on the +``message_id``, ``correlation_id``, or ``timestamp`` message property. To do so, +declare the exchange with a string argument called "hash-property" naming the +property to be used. For example using the Erlang client as above: + +```erlang + amqp_channel:call( + Chan, #'exchange.declare' { + exchange = <<"e">>, + type = <<"x-consistent-hash">>, + arguments = [{<<"hash-property">>, longstr, <<"message_id">>}] + }). +``` + +Note that you can not declare an exchange that routes on both "hash-header" and +"hash-property". If you specify "hash-property" and then publish messages without +a value in the named property, they will all get routed to the same +(arbitrarily-chosen) queue. + +## Getting Help + Any comments or feedback welcome, to the -[rabbitmq-discuss mailing list](https://lists.rabbitmq.com/cgi-bin/mailman/listinfo/rabbitmq-discuss) -or info@rabbitmq.com. +[RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users). 
+ +## Continuous Integration + +[![Build Status](https://travis-ci.org/rabbitmq/rabbitmq-consistent-hash-exchange.svg?branch=master)](https://travis-ci.org/rabbitmq/rabbitmq-consistent-hash-exchange) + +## Copyright and License + +(c) 2013-2015 Pivotal Software Inc. + +Released under the Mozilla Public License 1.1, same as RabbitMQ. +See [LICENSE](https://github.com/rabbitmq/rabbitmq-consistent-hash-exchange/blob/master/LICENSE) for +details. diff --git a/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/erlang.mk b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/erlang.mk new file mode 100644 index 0000000..1688ee8 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/erlang.mk @@ -0,0 +1,6640 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. 
+ +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. + +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... 
Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). 
+endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. 
+ +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch 
= git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. 
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += 
bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour D NS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simply writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Er lang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2014, Enrique Fernandez +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: eunit + +# Configuration + +EUNIT_OPTS ?= + +# Core targets. + +tests:: eunit + +help:: + $(verbose) printf "%s\n" "" \ + "EUnit targets:" \ + " eunit Run all the EUnit tests for this project" + +# Plugin-specific targets. 
+ +define eunit.erl + case "$(COVER)" of + "" -> ok; + _ -> + case cover:compile_beam_directory("ebin") of + {error, _} -> halt(1); + _ -> ok + end + end, + case eunit:test([$(call comma_list,$(1))], [$(EUNIT_OPTS)]) of + ok -> ok; + error -> halt(2) + end, + case "$(COVER)" of + "" -> ok; + _ -> + cover:export("eunit.coverdata") + end, + halt() +endef + +EUNIT_EBIN_MODS = $(notdir $(basename $(call core_find,ebin/,*.beam))) +EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.beam))) +EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \ + $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),{module,'$(mod)'}) + +eunit: test-build + $(gen_verbose) $(ERL) -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin ebin \ + -eval "$(subst $(newline),,$(subst ",\",$(call eunit.erl,$(EUNIT_MODS))))" + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. + +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. 
+ +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. + +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. 
+endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' 
> $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. +ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. 
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. +ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. 
+ +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = 
git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = 
git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples \ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. 
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. If this is not a Git clone, +# default to the upstream Git repository. 
+ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. +dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git 
checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. 
+# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) + +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . 
+ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbit_exchange_type_consistent_hash.erl b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl similarity index 74% rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbit_exchange_type_consistent_hash.erl rename to rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl index 36b4cf4..aa6a5e1 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbit_exchange_type_consistent_hash.erl +++ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/src/rabbit_exchange_type_consistent_hash.erl @@ -11,11 +11,12 @@ %% The Original Code is RabbitMQ Consistent Hash Exchange. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2011-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_exchange_type_consistent_hash). -include_lib("rabbit_common/include/rabbit.hrl"). +-include_lib("rabbit_common/include/rabbit_framing.hrl"). -behaviour(rabbit_exchange_type). @@ -46,6 +47,7 @@ -define(TABLE, ?MODULE). -define(PHASH2_RANGE, 134217728). %% 2^27 +-define(PROPERTIES, [<<"correlation_id">>, <<"message_id">>, <<"timestamp">>]). description() -> [{description, <<"Consistent Hashing Exchange">>}]. @@ -67,8 +69,7 @@ route(#exchange { name = Name, %% end up as relatively deep data structures which cost a lot to %% continually copy to the process heap. Consequently, such %% approaches have not been found to be much faster, if at all. 
- HashOn = rabbit_misc:table_lookup(Args, <<"hash-header">>), - H = erlang:phash2(hash(HashOn, Msg), ?PHASH2_RANGE), + H = erlang:phash2(hash(hash_on(Args), Msg), ?PHASH2_RANGE), case ets:select(?TABLE, [{#bucket { source_number = {Name, '$2'}, destination = '$1', _ = '_' }, @@ -84,7 +85,25 @@ route(#exchange { name = Name, Destinations end. -validate(_X) -> ok. +validate(#exchange { arguments = Args }) -> + case hash_args(Args) of + {undefined, undefined} -> + ok; + {undefined, {_Type, Value}} -> + case lists:member(Value, ?PROPERTIES) of + true -> ok; + false -> + rabbit_misc:protocol_error(precondition_failed, + "Unsupported property: ~s", + [Value]) + end; + {_, undefined} -> + ok; + {_, _} -> + rabbit_misc:protocol_error(precondition_failed, + "hash-header and hash-property are mutually exclusive", + []) + end. validate_binding(_X, #binding { key = K }) -> try @@ -168,9 +187,43 @@ find_numbers(Source, N, Acc) -> hash(undefined, #basic_message { routing_keys = Routes }) -> Routes; -hash({longstr, Header}, #basic_message { content = Content }) -> +hash({header, Header}, #basic_message { content = Content }) -> Headers = rabbit_basic:extract_headers(Content), case Headers of undefined -> undefined; _ -> rabbit_misc:table_lookup(Headers, Header) + end; +hash({property, Property}, #basic_message { content = Content }) -> + #content{properties = #'P_basic'{ correlation_id = CorrId, + message_id = MsgId, + timestamp = Timestamp }} = + rabbit_binary_parser:ensure_content_decoded(Content), + case Property of + <<"correlation_id">> -> CorrId; + <<"message_id">> -> MsgId; + <<"timestamp">> -> + case Timestamp of + undefined -> undefined; + _ -> integer_to_binary(Timestamp) + end + end. 
+ +hash_args(Args) -> + Header = + case rabbit_misc:table_lookup(Args, <<"hash-header">>) of + undefined -> undefined; + {longstr, V1} -> {header, V1} + end, + Property = + case rabbit_misc:table_lookup(Args, <<"hash-property">>) of + undefined -> undefined; + {longstr, V2} -> {property, V2} + end, + {Header, Property}. + +hash_on(Args) -> + case hash_args(Args) of + {undefined, undefined} -> undefined; + {Header, undefined} -> Header; + {undefined, Property} -> Property end. diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbitmq_consistent_hash_exchange.app.src b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/src/rabbitmq_consistent_hash_exchange.app.src similarity index 89% rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbitmq_consistent_hash_exchange.app.src rename to rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/src/rabbitmq_consistent_hash_exchange.app.src index ab7aab4..1fbcd78 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/src/rabbitmq_consistent_hash_exchange.app.src +++ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/src/rabbitmq_consistent_hash_exchange.app.src @@ -1,6 +1,6 @@ {application, rabbitmq_consistent_hash_exchange, [{description, "Consistent Hash Exchange Type"}, - {vsn, "%%VSN%%"}, + {vsn, "3.6.1"}, {modules, []}, {registered, []}, {env, []}, diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl similarity index 62% rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl rename to rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl index 9bb619d..be068b7 100644 --- 
a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl +++ b/rabbitmq-server/deps/rabbitmq_consistent_hash_exchange/test/src/rabbit_exchange_type_consistent_hash_test.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Consistent Hash Exchange. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2011-2014 GoPivotal, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_exchange_type_consistent_hash_test). @@ -32,6 +32,11 @@ t(Qs) -> ok = test_with_header(Qs), ok = test_binding_with_negative_routing_key(), ok = test_binding_with_non_numeric_routing_key(), + ok = test_with_correlation_id(Qs), + ok = test_with_message_id(Qs), + ok = test_with_timestamp(Qs), + ok = test_non_supported_property(), + ok = test_mutually_exclusive_arguments(), ok. test_with_rk(Qs) -> @@ -51,8 +56,63 @@ test_with_header(Qs) -> #amqp_msg{props = #'P_basic'{headers = H}, payload = <<>>} end, [{<<"hash-header">>, longstr, <<"hashme">>}], Qs). + +test_with_correlation_id(Qs) -> + test0(fun() -> + #'basic.publish'{exchange = <<"e">>} + end, + fun() -> + #amqp_msg{props = #'P_basic'{correlation_id = rnd()}, payload = <<>>} + end, [{<<"hash-property">>, longstr, <<"correlation_id">>}], Qs). + +test_with_message_id(Qs) -> + test0(fun() -> + #'basic.publish'{exchange = <<"e">>} + end, + fun() -> + #amqp_msg{props = #'P_basic'{message_id = rnd()}, payload = <<>>} + end, [{<<"hash-property">>, longstr, <<"message_id">>}], Qs). + +test_with_timestamp(Qs) -> + test0(fun() -> + #'basic.publish'{exchange = <<"e">>} + end, + fun() -> + #amqp_msg{props = #'P_basic'{timestamp = rndint()}, payload = <<>>} + end, [{<<"hash-property">>, longstr, <<"timestamp">>}], Qs). 
+ +test_mutually_exclusive_arguments() -> + {ok, Conn} = amqp_connection:start(#amqp_params_network{}), + {ok, Chan} = amqp_connection:open_channel(Conn), + process_flag(trap_exit, true), + Cmd = #'exchange.declare'{ + exchange = <<"fail">>, + type = <<"x-consistent-hash">>, + arguments = [{<<"hash-header">>, longstr, <<"foo">>}, + {<<"hash-property">>, longstr, <<"bar">>}] + }, + ?assertExit(_, amqp_channel:call(Chan, Cmd)), + amqp_connection:close(Conn), + ok. + +test_non_supported_property() -> + {ok, Conn} = amqp_connection:start(#amqp_params_network{}), + {ok, Chan} = amqp_connection:open_channel(Conn), + process_flag(trap_exit, true), + Cmd = #'exchange.declare'{ + exchange = <<"fail">>, + type = <<"x-consistent-hash">>, + arguments = [{<<"hash-property">>, longstr, <<"app_id">>}] + }, + ?assertExit(_, amqp_channel:call(Chan, Cmd)), + amqp_connection:close(Conn), + ok. + rnd() -> - list_to_binary(integer_to_list(random:uniform(1000000))). + list_to_binary(integer_to_list(rndint())). + +rndint() -> + random:uniform(1000000). 
test0(MakeMethod, MakeMsg, DeclareArgs, [Q1, Q2, Q3, Q4] = Queues) -> Count = 10000, @@ -69,16 +129,16 @@ test0(MakeMethod, MakeMsg, DeclareArgs, [Q1, Q2, Q3, Q4] = Queues) -> }), [#'queue.declare_ok'{} = amqp_channel:call(Chan, #'queue.declare' { - queue = Q, exclusive = true}) || Q <- Queues], + queue = Q, exclusive = true }) || Q <- Queues], [#'queue.bind_ok'{} = amqp_channel:call(Chan, #'queue.bind' {queue = Q, - exchange = <<"e">>, - routing_key = <<"10">>}) + exchange = <<"e">>, + routing_key = <<"10">>}) || Q <- [Q1, Q2]], [#'queue.bind_ok'{} = amqp_channel:call(Chan, #'queue.bind' {queue = Q, - exchange = <<"e">>, - routing_key = <<"20">>}) + exchange = <<"e">>, + routing_key = <<"20">>}) || Q <- [Q3, Q4]], #'tx.select_ok'{} = amqp_channel:call(Chan, #'tx.select'{}), [amqp_channel:call(Chan, @@ -87,7 +147,7 @@ test0(MakeMethod, MakeMsg, DeclareArgs, [Q1, Q2, Q3, Q4] = Queues) -> amqp_channel:call(Chan, #'tx.commit'{}), Counts = [begin - #'queue.declare_ok'{message_count = M} = + #'queue.declare_ok'{message_count = M} = amqp_channel:call(Chan, #'queue.declare' {queue = Q, exclusive = true}), M @@ -104,14 +164,14 @@ test_binding_with_negative_routing_key() -> {ok, Conn} = amqp_connection:start(#amqp_params_network{}), {ok, Chan} = amqp_connection:open_channel(Conn), Declare1 = #'exchange.declare'{exchange = <<"bind-fail">>, - type = <<"x-consistent-hash">>}, + type = <<"x-consistent-hash">>}, #'exchange.declare_ok'{} = amqp_channel:call(Chan, Declare1), Q = <<"test-queue">>, Declare2 = #'queue.declare'{queue = Q}, #'queue.declare_ok'{} = amqp_channel:call(Chan, Declare2), process_flag(trap_exit, true), Cmd = #'queue.bind'{exchange = <<"bind-fail">>, - routing_key = <<"-1">>}, + routing_key = <<"-1">>}, ?assertExit(_, amqp_channel:call(Chan, Cmd)), {ok, Ch2} = amqp_connection:open_channel(Conn), amqp_channel:call(Ch2, #'queue.delete'{queue = Q}), @@ -122,14 +182,14 @@ test_binding_with_non_numeric_routing_key() -> {ok, Conn} = 
amqp_connection:start(#amqp_params_network{}), {ok, Chan} = amqp_connection:open_channel(Conn), Declare1 = #'exchange.declare'{exchange = <<"bind-fail">>, - type = <<"x-consistent-hash">>}, + type = <<"x-consistent-hash">>}, #'exchange.declare_ok'{} = amqp_channel:call(Chan, Declare1), Q = <<"test-queue">>, Declare2 = #'queue.declare'{queue = Q}, #'queue.declare_ok'{} = amqp_channel:call(Chan, Declare2), process_flag(trap_exit, true), Cmd = #'queue.bind'{exchange = <<"bind-fail">>, - routing_key = <<"not-a-number">>}, + routing_key = <<"not-a-number">>}, ?assertExit(_, amqp_channel:call(Chan, Cmd)), {ok, Ch2} = amqp_connection:open_channel(Conn), amqp_channel:call(Ch2, #'queue.delete'{queue = Q}), diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/LICENSE b/rabbitmq-server/deps/rabbitmq_event_exchange/LICENSE new file mode 100644 index 0000000..d810614 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/LICENSE @@ -0,0 +1,5 @@ +This package, the RabbitMQ Event Exchange is licensed under +the MPL. For the MPL, please see LICENSE-MPL-RabbitMQ. + +If you have any questions regarding licensing, please contact us at +info@rabbitmq.com. diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/LICENSE-MPL-RabbitMQ b/rabbitmq-server/deps/rabbitmq_event_exchange/LICENSE-MPL-RabbitMQ new file mode 100644 index 0000000..9faaa4e --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/LICENSE-MPL-RabbitMQ @@ -0,0 +1,455 @@ + MOZILLA PUBLIC LICENSE + Version 1.1 + + --------------- + +1. Definitions. + + 1.0.1. "Commercial Use" means distribution or otherwise making the + Covered Code available to a third party. + + 1.1. "Contributor" means each entity that creates or contributes to + the creation of Modifications. + + 1.2. "Contributor Version" means the combination of the Original + Code, prior Modifications used by a Contributor, and the Modifications + made by that particular Contributor. + + 1.3. 
"Covered Code" means the Original Code or Modifications or the + combination of the Original Code and Modifications, in each case + including portions thereof. + + 1.4. "Electronic Distribution Mechanism" means a mechanism generally + accepted in the software development community for the electronic + transfer of data. + + 1.5. "Executable" means Covered Code in any form other than Source + Code. + + 1.6. "Initial Developer" means the individual or entity identified + as the Initial Developer in the Source Code notice required by Exhibit + A. + + 1.7. "Larger Work" means a work which combines Covered Code or + portions thereof with code not governed by the terms of this License. + + 1.8. "License" means this document. + + 1.8.1. "Licensable" means having the right to grant, to the maximum + extent possible, whether at the time of the initial grant or + subsequently acquired, any and all of the rights conveyed herein. + + 1.9. "Modifications" means any addition to or deletion from the + substance or structure of either the Original Code or any previous + Modifications. When Covered Code is released as a series of files, a + Modification is: + A. Any addition to or deletion from the contents of a file + containing Original Code or previous Modifications. + + B. Any new file that contains any part of the Original Code or + previous Modifications. + + 1.10. "Original Code" means Source Code of computer software code + which is described in the Source Code notice required by Exhibit A as + Original Code, and which, at the time of its release under this + License is not already Covered Code governed by this License. + + 1.10.1. "Patent Claims" means any patent claim(s), now owned or + hereafter acquired, including without limitation, method, process, + and apparatus claims, in any patent Licensable by grantor. + + 1.11. 
"Source Code" means the preferred form of the Covered Code for + making modifications to it, including all modules it contains, plus + any associated interface definition files, scripts used to control + compilation and installation of an Executable, or source code + differential comparisons against either the Original Code or another + well known, available Covered Code of the Contributor's choice. The + Source Code can be in a compressed or archival form, provided the + appropriate decompression or de-archiving software is widely available + for no charge. + + 1.12. "You" (or "Your") means an individual or a legal entity + exercising rights under, and complying with all of the terms of, this + License or a future version of this License issued under Section 6.1. + For legal entities, "You" includes any entity which controls, is + controlled by, or is under common control with You. For purposes of + this definition, "control" means (a) the power, direct or indirect, + to cause the direction or management of such entity, whether by + contract or otherwise, or (b) ownership of more than fifty percent + (50%) of the outstanding shares or beneficial ownership of such + entity. + +2. Source Code License. + + 2.1. The Initial Developer Grant. + The Initial Developer hereby grants You a world-wide, royalty-free, + non-exclusive license, subject to third party intellectual property + claims: + (a) under intellectual property rights (other than patent or + trademark) Licensable by Initial Developer to use, reproduce, + modify, display, perform, sublicense and distribute the Original + Code (or portions thereof) with or without Modifications, and/or + as part of a Larger Work; and + + (b) under Patents Claims infringed by the making, using or + selling of Original Code, to make, have made, use, practice, + sell, and offer for sale, and/or otherwise dispose of the + Original Code (or portions thereof). 
+ + (c) the licenses granted in this Section 2.1(a) and (b) are + effective on the date Initial Developer first distributes + Original Code under the terms of this License. + + (d) Notwithstanding Section 2.1(b) above, no patent license is + granted: 1) for code that You delete from the Original Code; 2) + separate from the Original Code; or 3) for infringements caused + by: i) the modification of the Original Code or ii) the + combination of the Original Code with other software or devices. + + 2.2. Contributor Grant. + Subject to third party intellectual property claims, each Contributor + hereby grants You a world-wide, royalty-free, non-exclusive license + + (a) under intellectual property rights (other than patent or + trademark) Licensable by Contributor, to use, reproduce, modify, + display, perform, sublicense and distribute the Modifications + created by such Contributor (or portions thereof) either on an + unmodified basis, with other Modifications, as Covered Code + and/or as part of a Larger Work; and + + (b) under Patent Claims infringed by the making, using, or + selling of Modifications made by that Contributor either alone + and/or in combination with its Contributor Version (or portions + of such combination), to make, use, sell, offer for sale, have + made, and/or otherwise dispose of: 1) Modifications made by that + Contributor (or portions thereof); and 2) the combination of + Modifications made by that Contributor with its Contributor + Version (or portions of such combination). + + (c) the licenses granted in Sections 2.2(a) and 2.2(b) are + effective on the date Contributor first makes Commercial Use of + the Covered Code. 
+ + (d) Notwithstanding Section 2.2(b) above, no patent license is + granted: 1) for any code that Contributor has deleted from the + Contributor Version; 2) separate from the Contributor Version; + 3) for infringements caused by: i) third party modifications of + Contributor Version or ii) the combination of Modifications made + by that Contributor with other software (except as part of the + Contributor Version) or other devices; or 4) under Patent Claims + infringed by Covered Code in the absence of Modifications made by + that Contributor. + +3. Distribution Obligations. + + 3.1. Application of License. + The Modifications which You create or to which You contribute are + governed by the terms of this License, including without limitation + Section 2.2. The Source Code version of Covered Code may be + distributed only under the terms of this License or a future version + of this License released under Section 6.1, and You must include a + copy of this License with every copy of the Source Code You + distribute. You may not offer or impose any terms on any Source Code + version that alters or restricts the applicable version of this + License or the recipients' rights hereunder. However, You may include + an additional document offering the additional rights described in + Section 3.5. + + 3.2. Availability of Source Code. + Any Modification which You create or to which You contribute must be + made available in Source Code form under the terms of this License + either on the same media as an Executable version or via an accepted + Electronic Distribution Mechanism to anyone to whom you made an + Executable version available; and if made available via Electronic + Distribution Mechanism, must remain available for at least twelve (12) + months after the date it initially became available, or at least six + (6) months after a subsequent version of that particular Modification + has been made available to such recipients. 
You are responsible for + ensuring that the Source Code version remains available even if the + Electronic Distribution Mechanism is maintained by a third party. + + 3.3. Description of Modifications. + You must cause all Covered Code to which You contribute to contain a + file documenting the changes You made to create that Covered Code and + the date of any change. You must include a prominent statement that + the Modification is derived, directly or indirectly, from Original + Code provided by the Initial Developer and including the name of the + Initial Developer in (a) the Source Code, and (b) in any notice in an + Executable version or related documentation in which You describe the + origin or ownership of the Covered Code. + + 3.4. Intellectual Property Matters + (a) Third Party Claims. + If Contributor has knowledge that a license under a third party's + intellectual property rights is required to exercise the rights + granted by such Contributor under Sections 2.1 or 2.2, + Contributor must include a text file with the Source Code + distribution titled "LEGAL" which describes the claim and the + party making the claim in sufficient detail that a recipient will + know whom to contact. If Contributor obtains such knowledge after + the Modification is made available as described in Section 3.2, + Contributor shall promptly modify the LEGAL file in all copies + Contributor makes available thereafter and shall take other steps + (such as notifying appropriate mailing lists or newsgroups) + reasonably calculated to inform those who received the Covered + Code that new knowledge has been obtained. + + (b) Contributor APIs. + If Contributor's Modifications include an application programming + interface and Contributor has knowledge of patent licenses which + are reasonably necessary to implement that API, Contributor must + also include this information in the LEGAL file. + + (c) Representations. 
+ Contributor represents that, except as disclosed pursuant to + Section 3.4(a) above, Contributor believes that Contributor's + Modifications are Contributor's original creation(s) and/or + Contributor has sufficient rights to grant the rights conveyed by + this License. + + 3.5. Required Notices. + You must duplicate the notice in Exhibit A in each file of the Source + Code. If it is not possible to put such notice in a particular Source + Code file due to its structure, then You must include such notice in a + location (such as a relevant directory) where a user would be likely + to look for such a notice. If You created one or more Modification(s) + You may add your name as a Contributor to the notice described in + Exhibit A. You must also duplicate this License in any documentation + for the Source Code where You describe recipients' rights or ownership + rights relating to Covered Code. You may choose to offer, and to + charge a fee for, warranty, support, indemnity or liability + obligations to one or more recipients of Covered Code. However, You + may do so only on Your own behalf, and not on behalf of the Initial + Developer or any Contributor. You must make it absolutely clear than + any such warranty, support, indemnity or liability obligation is + offered by You alone, and You hereby agree to indemnify the Initial + Developer and every Contributor for any liability incurred by the + Initial Developer or such Contributor as a result of warranty, + support, indemnity or liability terms You offer. + + 3.6. Distribution of Executable Versions. + You may distribute Covered Code in Executable form only if the + requirements of Section 3.1-3.5 have been met for that Covered Code, + and if You include a notice stating that the Source Code version of + the Covered Code is available under the terms of this License, + including a description of how and where You have fulfilled the + obligations of Section 3.2. 
The notice must be conspicuously included + in any notice in an Executable version, related documentation or + collateral in which You describe recipients' rights relating to the + Covered Code. You may distribute the Executable version of Covered + Code or ownership rights under a license of Your choice, which may + contain terms different from this License, provided that You are in + compliance with the terms of this License and that the license for the + Executable version does not attempt to limit or alter the recipient's + rights in the Source Code version from the rights set forth in this + License. If You distribute the Executable version under a different + license You must make it absolutely clear that any terms which differ + from this License are offered by You alone, not by the Initial + Developer or any Contributor. You hereby agree to indemnify the + Initial Developer and every Contributor for any liability incurred by + the Initial Developer or such Contributor as a result of any such + terms You offer. + + 3.7. Larger Works. + You may create a Larger Work by combining Covered Code with other code + not governed by the terms of this License and distribute the Larger + Work as a single product. In such a case, You must make sure the + requirements of this License are fulfilled for the Covered Code. + +4. Inability to Comply Due to Statute or Regulation. + + If it is impossible for You to comply with any of the terms of this + License with respect to some or all of the Covered Code due to + statute, judicial order, or regulation then You must: (a) comply with + the terms of this License to the maximum extent possible; and (b) + describe the limitations and the code they affect. Such description + must be included in the LEGAL file described in Section 3.4 and must + be included with all distributions of the Source Code. 
Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Application of this License. + + This License applies to code to which the Initial Developer has + attached the notice in Exhibit A and to related Covered Code. + +6. Versions of the License. + + 6.1. New Versions. + Netscape Communications Corporation ("Netscape") may publish revised + and/or new versions of the License from time to time. Each version + will be given a distinguishing version number. + + 6.2. Effect of New Versions. + Once Covered Code has been published under a particular version of the + License, You may always continue to use it under the terms of that + version. You may also choose to use such Covered Code under the terms + of any subsequent version of the License published by Netscape. No one + other than Netscape has the right to modify the terms applicable to + Covered Code created under this License. + + 6.3. Derivative Works. + If You create or use a modified version of this License (which you may + only do in order to apply it to code which is not already Covered Code + governed by this License), You must (a) rename Your license so that + the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape", + "MPL", "NPL" or any confusingly similar phrase do not appear in your + license (except to note that your license differs from this License) + and (b) otherwise make it clear that Your version of the license + contains terms which differ from the Mozilla Public License and + Netscape Public License. (Filling in the name of the Initial + Developer, Original Code or Contributor in the notice described in + Exhibit A shall not of themselves be deemed to be modifications of + this License.) + +7. DISCLAIMER OF WARRANTY. 
+ + COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, + WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, + WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF + DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. + THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE + IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT, + YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE + COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER + OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF + ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +8. TERMINATION. + + 8.1. This License and the rights granted hereunder will terminate + automatically if You fail to comply with terms herein and fail to cure + such breach within 30 days of becoming aware of the breach. All + sublicenses to the Covered Code which are properly granted shall + survive any termination of this License. Provisions which, by their + nature, must remain in effect beyond the termination of this License + shall survive. + + 8.2. 
If You initiate litigation by asserting a patent infringement + claim (excluding declatory judgment actions) against Initial Developer + or a Contributor (the Initial Developer or Contributor against whom + You file such action is referred to as "Participant") alleging that: + + (a) such Participant's Contributor Version directly or indirectly + infringes any patent, then any and all rights granted by such + Participant to You under Sections 2.1 and/or 2.2 of this License + shall, upon 60 days notice from Participant terminate prospectively, + unless if within 60 days after receipt of notice You either: (i) + agree in writing to pay Participant a mutually agreeable reasonable + royalty for Your past and future use of Modifications made by such + Participant, or (ii) withdraw Your litigation claim with respect to + the Contributor Version against such Participant. If within 60 days + of notice, a reasonable royalty and payment arrangement are not + mutually agreed upon in writing by the parties or the litigation claim + is not withdrawn, the rights granted by Participant to You under + Sections 2.1 and/or 2.2 automatically terminate at the expiration of + the 60 day notice period specified above. + + (b) any software, hardware, or device, other than such Participant's + Contributor Version, directly or indirectly infringes any patent, then + any rights granted to You by such Participant under Sections 2.1(b) + and 2.2(b) are revoked effective as of the date You first made, used, + sold, distributed, or had made, Modifications made by that + Participant. + + 8.3. 
If You assert a patent infringement claim against Participant + alleging that such Participant's Contributor Version directly or + indirectly infringes any patent where such claim is resolved (such as + by license or settlement) prior to the initiation of patent + infringement litigation, then the reasonable value of the licenses + granted by such Participant under Sections 2.1 or 2.2 shall be taken + into account in determining the amount or value of any payment or + license. + + 8.4. In the event of termination under Sections 8.1 or 8.2 above, + all end user license agreements (excluding distributors and resellers) + which have been validly granted by You or any distributor hereunder + prior to termination shall survive termination. + +9. LIMITATION OF LIABILITY. + + UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT + (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL + DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE, + OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR + ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY + CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL, + WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER + COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN + INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF + LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY + RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW + PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE + EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO + THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. + +10. U.S. GOVERNMENT END USERS. + + The Covered Code is a "commercial item," as that term is defined in + 48 C.F.R. 2.101 (Oct. 
1995), consisting of "commercial computer + software" and "commercial computer software documentation," as such + terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 + C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), + all U.S. Government End Users acquire Covered Code with only those + rights set forth herein. + +11. MISCELLANEOUS. + + This License represents the complete agreement concerning subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. This License shall be governed by + California law provisions (except to the extent applicable law, if + any, provides otherwise), excluding its conflict-of-law provisions. + With respect to disputes in which at least one party is a citizen of, + or an entity chartered or registered to do business in the United + States of America, any litigation relating to this License shall be + subject to the jurisdiction of the Federal Courts of the Northern + District of California, with venue lying in Santa Clara County, + California, with the losing party responsible for costs, including + without limitation, court costs and reasonable attorneys' fees and + expenses. The application of the United Nations Convention on + Contracts for the International Sale of Goods is expressly excluded. + Any law or regulation which provides that the language of a contract + shall be construed against the drafter shall not apply to this + License. + +12. RESPONSIBILITY FOR CLAIMS. + + As between Initial Developer and the Contributors, each party is + responsible for claims and damages arising, directly or indirectly, + out of its utilization of rights under this License and You agree to + work with Initial Developer and Contributors to distribute such + responsibility on an equitable basis. Nothing herein is intended or + shall be deemed to constitute any admission of liability. + +13. 
MULTIPLE-LICENSED CODE. + + Initial Developer may designate portions of the Covered Code as + "Multiple-Licensed". "Multiple-Licensed" means that the Initial + Developer permits you to utilize portions of the Covered Code under + Your choice of the NPL or the alternative licenses, if any, specified + by the Initial Developer in the file described in Exhibit A. + +EXHIBIT A -Mozilla Public License. + + ``The contents of this file are subject to the Mozilla Public License + Version 1.1 (the "License"); you may not use this file except in + compliance with the License. You may obtain a copy of the License at + http://www.mozilla.org/MPL/ + + Software distributed under the License is distributed on an "AS IS" + basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the + License for the specific language governing rights and limitations + under the License. + + The Original Code is RabbitMQ Consistent Hash Exchange. + + The Initial Developer of the Original Code is GoPivotal, Inc. + Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved.'' + + [NOTE: The text of this Exhibit A may differ slightly from the text of + the notices in the Source Code files of the Original Code. You should + use the text of this Exhibit A rather than the text found in the + Original Code Source Code for Your Modifications.] diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/Makefile b/rabbitmq-server/deps/rabbitmq_event_exchange/Makefile new file mode 100644 index 0000000..7ce8e2a --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/Makefile @@ -0,0 +1,21 @@ +PROJECT = rabbitmq_event_exchange + +TEST_DEPS = amqp_client + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. 
+ +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk + +# -------------------------------------------------------------------- +# Testing. +# -------------------------------------------------------------------- + +WITH_BROKER_TEST_COMMANDS := \ + rabbit_exchange_type_event_test_all:all_tests() diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/README.md b/rabbitmq-server/deps/rabbitmq_event_exchange/README.md new file mode 100644 index 0000000..9878bdc --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/README.md @@ -0,0 +1,136 @@ +# RabbitMQ Event Exchange + +## Overview + +This plugin exposes the internal RabbitMQ event mechanism as messages that clients +can consume. It's useful +if you want to keep track of certain events, e.g. when queues, exchanges, bindings, users, +connections, channels are created and deleted. This plugin filters out stats +events, so you are almost certainly going to get better results using +the management plugin for stats. + +## How it Works + +It declares a topic exchange called 'amq.rabbitmq.event' in the default +virtual host. All events are published to this exchange with routing +keys like 'exchange.created', 'binding.deleted' etc, so you can +subscribe to only the events you're interested in. + +The exchange behaves similarly to 'amq.rabbitmq.log': everything gets +published there; if you don't trust a user with the information that +gets published, don't allow them access. + +The plugin requires no configuration, just activate it: + + rabbitmq-plugins enable rabbitmq_event_exchange + + +## Downloading + +You can download a pre-built binary of this plugin from +the [RabbitMQ Community Plugins](http://www.rabbitmq.com/community-plugins.html) page. + + +## Building + +Building is no different from [building other RabbitMQ plugins](http://www.rabbitmq.com/plugin-development.html). 
+ +TL;DR: + + git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git + cd rabbitmq-public-umbrella + make co + git clone https://github.com/rabbitmq/rabbitmq-event-exchange.git + cd rabbitmq-event-exchange + make -j + +## Event format + +Each event has various properties associated with it. These are +translated into AMQP 0-9-1 data encoding and inserted in the message headers. The +**message body is always blank**. + +## Events + +So far RabbitMQ and related plugins emit events with the following routing keys: + +### RabbitMQ Broker + +Queue, Exchange and Binding events: + +- `queue.deleted` +- `queue.created` +- `exchange.created` +- `exchange.deleted` +- `binding.created` +- `binding.deleted` + +Connection and Channel events: + +- `connection.created` +- `connection.closed` +- `channel.created` +- `channel.closed` + +Consumer events: + +- `consumer.created` +- `consumer.deleted` + +Policy and Parameter events: + +- `policy.set` +- `policy.cleared` +- `parameter.set` +- `parameter.cleared` + +Virtual host events: + +- `vhost.created` +- `vhost.deleted` + +User related events: + +- `user.authentication.success` +- `user.authentication.failure` +- `user.created` +- `user.deleted` +- `user.password.changed` +- `user.password.cleared` +- `user.tags.set` + +Permission events: + +- `permission.created` +- `permission.deleted` + +### Shovel Plugin + +Worker events: + +- `shovel.worker.status` +- `shovel.worker.removed` + +### Federation Plugin + +Link events: + +- `federation.link.status` +- `federation.link.removed` + +## Example + +There is a usage example using the Java client in `examples/java`. + +## Uninstalling + +If you want to remove the exchange which this plugin creates, first +disable the plugin and restart the broker. Then you can delete the exchange, +e.g. with: + + rabbitmqctl eval 'rabbit_exchange:delete(rabbit_misc:r(<<"/">>, exchange, <<"amq.rabbitmq.event">>), false).' 
+ +## License + +Released under the Mozilla Public License 1.1, +the same as RabbitMQ. diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/build.config b/rabbitmq-server/deps/rabbitmq_event_exchange/build.config new file mode 100644 index 0000000..0855303 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/build.config @@ -0,0 +1,43 @@ +# Do *not* comment or remove core modules +# unless you know what you are doing. +# +# Feel free to comment plugins out however. + +# Core modules. +core/core +index/* +core/index +core/deps + +# Plugins that must run before Erlang code gets compiled. +plugins/erlydtl +plugins/protobuffs + +# Core modules, continued. +core/erlc +core/docs +core/rel +core/test +core/compat + +# Plugins. +plugins/asciidoc +plugins/bootstrap +plugins/c_src +plugins/ci +plugins/ct +plugins/dialyzer +plugins/edoc +plugins/elvis +plugins/escript +# plugins/eunit +plugins/relx +plugins/shell +plugins/triq +plugins/xref + +# Plugins enhancing the functionality of other plugins. +plugins/cover + +# Core modules which can use variables from plugins. +core/deps-tools diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/erlang.mk b/rabbitmq-server/deps/rabbitmq_event_exchange/erlang.mk new file mode 100644 index 0000000..9f0c0c3 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/erlang.mk @@ -0,0 +1,6589 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c-dirty + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. + +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. 
+rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. 
+define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). +endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. 
+ +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. + +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative 
Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch = git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += 
backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. +pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear 
+pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simply writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. 
+ +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. + +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. 
+ +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. +endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' > $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. 
+ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. +EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/examples/java/QueueEvents.java b/rabbitmq-server/deps/rabbitmq_event_exchange/examples/java/QueueEvents.java new file mode 100644 index 0000000..aca953c --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/examples/java/QueueEvents.java @@ -0,0 +1,43 @@ + +import com.rabbitmq.client.AMQP; +import com.rabbitmq.client.Channel; +import com.rabbitmq.client.Connection; +import com.rabbitmq.client.ConnectionFactory; +import com.rabbitmq.client.Consumer; +import com.rabbitmq.client.DefaultConsumer; +import com.rabbitmq.client.Envelope; + +import java.io.IOException; +import java.util.Map; + +public class QueueEvents { + public static void main(String[] args) throws IOException, InterruptedException { + ConnectionFactory f = new ConnectionFactory(); + Connection c = f.newConnection(); + Channel ch = c.createChannel(); + String q = ch.queueDeclare().getQueue(); + ch.queueBind(q, "amq.rabbitmq.event", "queue.*"); + + Consumer consumer = new DefaultConsumer(ch) { + @Override + public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) throws IOException { + String event = envelope.getRoutingKey(); + Map headers = properties.getHeaders(); + String name = headers.get("name").toString(); + String vhost = headers.get("vhost").toString(); + + if (event.equals("queue.created")) { + boolean durable = (Boolean) 
headers.get("durable"); + String durableString = durable ? " (durable)" : " (transient)"; + System.out.println("Created: " + name + " in " + vhost + durableString); + } + else /* queue.deleted is the only other possibility */ { + System.out.println("Deleted: " + name + " in " + vhost); + } + } + }; + ch.basicConsume(q, true, consumer); + System.out.println("QUEUE EVENTS"); + System.out.println("============\n"); + } +} diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/rabbitmq-components.mk b/rabbitmq-server/deps/rabbitmq_event_exchange/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. +ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. 
+ +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = 
git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = 
git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples \ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. 
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. If this is not a Git clone, +# default to the upstream Git repository. 
+ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. +dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git 
checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. 
+# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) + +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . 
+ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl b/rabbitmq-server/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl new file mode 100644 index 0000000..87095de --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/src/rabbit_exchange_type_event.erl @@ -0,0 +1,131 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and +%% limitations under the License. +%% +%% The Original Code is RabbitMQ. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_exchange_type_event). +-include_lib("rabbit_common/include/rabbit.hrl"). +-include_lib("rabbit_common/include/rabbit_framing.hrl"). + +-export([register/0, unregister/0]). +-export([init/1, handle_call/2, handle_event/2, handle_info/2, + terminate/2, code_change/3]). + +-export([fmt_proplist/1]). %% testing + +-define(EXCH_NAME, <<"amq.rabbitmq.event">>). + +-import(rabbit_misc, [pget/2, pget/3]). + +-rabbit_boot_step({rabbit_event_exchange, + [{description, "event exchange"}, + {mfa, {?MODULE, register, []}}, + {cleanup, {?MODULE, unregister, []}}, + {requires, recovery}, + {enables, routing_ready}]}). 
+ +%%---------------------------------------------------------------------------- + +register() -> + rabbit_exchange:declare(x(), topic, true, false, true, []), + gen_event:add_handler(rabbit_event, ?MODULE, []). + +unregister() -> + gen_event:delete_handler(rabbit_event, ?MODULE, []). + +x() -> + VHost = ensure_vhost_exists(), + rabbit_misc:r(VHost, exchange, ?EXCH_NAME). + +%%---------------------------------------------------------------------------- + +init([]) -> {ok, []}. + +handle_call(_Request, State) -> {ok, not_understood, State}. + +handle_event(#event{type = Type, + props = Props, + timestamp = TS, + reference = none}, State) -> + case key(Type) of + ignore -> ok; + Key -> PBasic = #'P_basic'{delivery_mode = 2, + headers = fmt_proplist(Props), + %% 0-9-1 says the timestamp is a + %% "64 bit POSIX + %% timestamp". That's second + %% resolution, not millisecond. + timestamp = time_compat:convert_time_unit( + TS, milli_seconds, seconds)}, + Msg = rabbit_basic:message(x(), Key, PBasic, <<>>), + rabbit_basic:publish( + rabbit_basic:delivery(false, false, Msg, undefined)) + end, + {ok, State}; +handle_event(_Event, State) -> + {ok, State}. + +handle_info(_Info, State) -> {ok, State}. + +terminate(_Arg, _State) -> ok. + +code_change(_OldVsn, State, _Extra) -> {ok, State}. + +%%---------------------------------------------------------------------------- + +ensure_vhost_exists() -> + VHost = case application:get_env(rabbitmq_event_exchange, vhost) of + undefined -> + {ok, V} = application:get_env(rabbit, default_vhost), + V; + {ok, V} -> + V + end, + case rabbit_vhost:exists(VHost) of + false -> rabbit_vhost:add(VHost); + _ -> ok + end, + VHost. + +key(S) -> + case string:tokens(atom_to_list(S), "_") of + [_, "stats"] -> ignore; + Tokens -> list_to_binary(string:join(Tokens, ".")) + end. + +fmt_proplist(Props) -> + lists:append([fmt(a2b(K), V) || {K, V} <- Props]). 
+ +fmt(K, #resource{virtual_host = VHost, + name = Name}) -> [{K, longstr, Name}, + {<<"vhost">>, longstr, VHost}]; +fmt(K, V) -> {T, Enc} = fmt(V), + [{K, T, Enc}]. + +fmt(true) -> {bool, true}; +fmt(false) -> {bool, false}; +fmt(V) when is_atom(V) -> {longstr, a2b(V)}; +fmt(V) when is_integer(V) -> {long, V}; +fmt(V) when is_number(V) -> {float, V}; +fmt(V) when is_binary(V) -> {longstr, V}; +fmt([{_, _}|_] = Vs) -> {table, fmt_proplist(Vs)}; +fmt(Vs) when is_list(Vs) -> {array, [fmt(V) || V <- Vs]}; +fmt(V) when is_pid(V) -> {longstr, + list_to_binary(rabbit_misc:pid_to_string(V))}; +fmt(V) -> {longstr, + list_to_binary( + rabbit_misc:format("~1000000000p", [V]))}. + +a2b(A) when is_atom(A) -> list_to_binary(atom_to_list(A)); +a2b(B) when is_binary(B) -> B. diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/src/rabbitmq_event_exchange.app.src b/rabbitmq-server/deps/rabbitmq_event_exchange/src/rabbitmq_event_exchange.app.src new file mode 100644 index 0000000..f1dc8f5 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/src/rabbitmq_event_exchange.app.src @@ -0,0 +1,7 @@ +{application, rabbitmq_event_exchange, + [{description, "Event Exchange Type"}, + {vsn, "3.6.1"}, + {modules, []}, + {registered, []}, + {env, []}, + {applications, [rabbit]}]}. diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_test.erl b/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_test.erl new file mode 100644 index 0000000..80a79ec --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_test.erl @@ -0,0 +1,52 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. 
You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and +%% limitations under the License. +%% +%% The Original Code is RabbitMQ Consistent Hash Exchange. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_exchange_type_event_test). +-include_lib("eunit/include/eunit.hrl"). + +-include_lib("amqp_client/include/amqp_client.hrl"). + +%% Only really tests that we're not completely broken. +simple_test() -> + Now = time_compat:os_system_time(seconds), + {ok, Conn} = amqp_connection:start(#amqp_params_network{}), + {ok, Ch} = amqp_connection:open_channel(Conn), + #'queue.declare_ok'{queue = Q} = + amqp_channel:call(Ch, #'queue.declare'{exclusive = true}), + amqp_channel:call(Ch, #'queue.bind'{queue = Q, + exchange = <<"amq.rabbitmq.event">>, + routing_key = <<"queue.*">>}), + amqp_channel:subscribe(Ch, #'basic.consume'{queue = Q, no_ack = true}, + self()), + receive + #'basic.consume_ok'{} -> ok + end, + + #'queue.declare_ok'{queue = Q2} = + amqp_channel:call(Ch, #'queue.declare'{exclusive = true}), + + receive + {#'basic.deliver'{routing_key = Key}, + #amqp_msg{props = #'P_basic'{headers = Headers, timestamp = TS}}} -> + %% timestamp is within the last 5 seconds + ?assert((TS - Now) =< 5), + ?assertMatch(<<"queue.created">>, Key), + ?assertMatch({longstr, Q2}, rabbit_misc:table_lookup( + Headers, <<"name">>)) + end, + + amqp_connection:close(Conn), + ok. 
diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_test_all.erl b/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_test_all.erl new file mode 100644 index 0000000..278b9e0 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_test_all.erl @@ -0,0 +1,29 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and +%% limitations under the License. +%% +%% The Original Code is RabbitMQ Consistent Hash Exchange. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_exchange_type_event_test_all). + +-export([all_tests/0]). + +all_tests() -> + ok = eunit:test(tests(rabbit_exchange_type_event_unit_test, 60), [verbose]), + ok = eunit:test(tests(rabbit_exchange_type_event_test, 60), [verbose]). + +tests(Module, Timeout) -> + {foreach, fun() -> ok end, + [{timeout, Timeout, fun () -> Module:F() end} || + {F, _Arity} <- proplists:get_value(exports, Module:module_info()), + string:right(atom_to_list(F), 5) =:= "_test"]}. 
diff --git a/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_unit_test.erl b/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_unit_test.erl new file mode 100644 index 0000000..89e5b23 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_event_exchange/test/src/rabbit_exchange_type_event_unit_test.erl @@ -0,0 +1,56 @@ +%% The contents of this file are subject to the Mozilla Public License +%% Version 1.1 (the "License"); you may not use this file except in +%% compliance with the License. You may obtain a copy of the License +%% at http://www.mozilla.org/MPL/ +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and +%% limitations under the License. +%% +%% The Original Code is RabbitMQ Consistent Hash Exchange. +%% +%% The Initial Developer of the Original Code is GoPivotal, Inc. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. +%% + +-module(rabbit_exchange_type_event_unit_test). + +-include_lib("eunit/include/eunit.hrl"). 
+ +encoding_test() -> + T = fun (In, Exp) -> + ?assertEqual(rabbit_exchange_type_event:fmt_proplist(In), Exp) + end, + T([{name, <<"test">>}], + [{<<"name">>, longstr, <<"test">>}]), + T([{name, rabbit_misc:r(<<"/">>, exchange, <<"test">>)}], + [{<<"name">>, longstr, <<"test">>}, + {<<"vhost">>, longstr, <<"/">>}]), + T([{name, <<"test">>}, + {number, 1}, + {real, 1.0}, + {bool, true}, + {atom, hydrogen}, + {weird, {1,2,3,[a|1],"a"}}, + {list, [1,2,[a,b]]}, + {proplist, [{foo, a}, + {bar, [{baz, b}, + {bash, c}]}]} + ], + [{<<"name">>, longstr, <<"test">>}, + {<<"number">>, long, 1}, + {<<"real">>, float, 1.0}, + {<<"bool">>, bool, true}, + {<<"atom">>, longstr, <<"hydrogen">>}, + {<<"weird">>, longstr, <<"{1,2,3,[a|1],\"a\"}">>}, + {<<"list">>, array, [{long, 1}, + {long, 2}, + {array, [{longstr, <<"a">>}, + {longstr, <<"b">>}]}]}, + {<<"proplist">>, table, + [{<<"foo">>, longstr, <<"a">>}, + {<<"bar">>, table, [{<<"baz">>, longstr, <<"b">>}, + {<<"bash">>, longstr, <<"c">>}]}]} + ]), + ok. diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/CONTRIBUTING.md b/rabbitmq-server/deps/rabbitmq_federation/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/CONTRIBUTING.md rename to rabbitmq-server/deps/rabbitmq_federation/CONTRIBUTING.md diff --git a/rabbitmq-server/deps/rabbitmq_federation/Makefile b/rabbitmq-server/deps/rabbitmq_federation/Makefile new file mode 100644 index 0000000..c1f7530 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation/Makefile @@ -0,0 +1,36 @@ +PROJECT = rabbitmq_federation + +DEPS = amqp_client +TEST_DEPS = rabbit + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. 
+ +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk + +# -------------------------------------------------------------------- +# Testing. +# -------------------------------------------------------------------- + +FILTER := all +COVER := false + +WITH_BROKER_TEST_COMMANDS := \ + rabbit_test_runner:run_in_broker(\"$(CURDIR)/test\",\"$(FILTER)\") +WITH_BROKER_SETUP_SCRIPTS := $(CURDIR)/etc/setup-rabbit-test.sh + +TEST_PLUGINS_ROOTDIR := $(TEST_TMPDIR)/plugins + +STANDALONE_TEST_COMMANDS := \ + rabbit_test_runner:run_multi(\"$(DEPS_DIR)\",\"$(CURDIR)/test\",\"$(FILTER)\",$(COVER),\"$(TEST_PLUGINS_ROOTDIR)\") + +pre-standalone-tests:: test-tmpdir test-dist + $(verbose) rm -rf $(TEST_PLUGINS_ROOTDIR) + $(exec_verbose) mkdir -p $(TEST_PLUGINS_ROOTDIR) + $(verbose) cp -a $(DIST_DIR) $(TEST_PLUGINS_ROOTDIR) diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/README-hacking b/rabbitmq-server/deps/rabbitmq_federation/README-hacking similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-federation/README-hacking rename to rabbitmq-server/deps/rabbitmq_federation/README-hacking diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/README.md b/rabbitmq-server/deps/rabbitmq_federation/README.md similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-federation/README.md rename to rabbitmq-server/deps/rabbitmq_federation/README.md diff --git a/rabbitmq-server/deps/rabbitmq_federation/build.config b/rabbitmq-server/deps/rabbitmq_federation/build.config new file mode 100644 index 0000000..0855303 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation/build.config @@ -0,0 +1,43 @@ +# Do *not* comment or remove core modules +# unless you know what you are doing. +# +# Feel free to comment plugins out however. + +# Core modules. +core/core +index/* +core/index +core/deps + +# Plugins that must run before Erlang code gets compiled. 
+plugins/erlydtl +plugins/protobuffs + +# Core modules, continued. +core/erlc +core/docs +core/rel +core/test +core/compat + +# Plugins. +plugins/asciidoc +plugins/bootstrap +plugins/c_src +plugins/ci +plugins/ct +plugins/dialyzer +plugins/edoc +plugins/elvis +plugins/escript +# plugins/eunit +plugins/relx +plugins/shell +plugins/triq +plugins/xref + +# Plugins enhancing the functionality of other plugins. +plugins/cover + +# Core modules which can use variables from plugins. +core/deps-tools diff --git a/rabbitmq-server/deps/rabbitmq_federation/erlang.mk b/rabbitmq-server/deps/rabbitmq_federation/erlang.mk new file mode 100644 index 0000000..9f0c0c3 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation/erlang.mk @@ -0,0 +1,6589 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c-dirty + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. 
+ +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. + +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... 
Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). 
+endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. 
+ +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch 
= git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. 
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += 
bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour D NS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simply writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab protocol implemented in erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = master + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. 
+ +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. + +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. 
+ +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. +endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' > $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. 
+ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. +EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/etc/rabbit-test.sh b/rabbitmq-server/deps/rabbitmq_federation/etc/rabbit-test.sh similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-federation/etc/rabbit-test.sh rename to rabbitmq-server/deps/rabbitmq_federation/etc/rabbit-test.sh diff --git a/rabbitmq-server/deps/rabbitmq_federation/etc/setup-rabbit-test.sh b/rabbitmq-server/deps/rabbitmq_federation/etc/setup-rabbit-test.sh new file mode 100755 index 0000000..2e2282e --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation/etc/setup-rabbit-test.sh @@ -0,0 +1,2 @@ +#!/bin/sh -e +sh -e `dirname $0`/rabbit-test.sh "$DEPS_DIR/rabbit/scripts/rabbitmqctl -n $RABBITMQ_NODENAME" diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/include/rabbit_federation.hrl b/rabbitmq-server/deps/rabbitmq_federation/include/rabbit_federation.hrl similarity index 95% rename from rabbitmq-server/plugins-src/rabbitmq-federation/include/rabbit_federation.hrl rename to rabbitmq-server/deps/rabbitmq_federation/include/rabbit_federation.hrl index 0995cfd..5081eca 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/include/rabbit_federation.hrl +++ b/rabbitmq-server/deps/rabbitmq_federation/include/rabbit_federation.hrl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. 
All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -record(upstream, {uris, diff --git a/rabbitmq-server/deps/rabbitmq_federation/rabbitmq-components.mk b/rabbitmq-server/deps/rabbitmq_federation/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. +ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. 
+ +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = 
git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = 
git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples \ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. 
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. If this is not a Git clone, +# default to the upstream Git repository. 
+ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. +dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git 
checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. 
+# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) + +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . 
+ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_app.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_app.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_app.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_app.erl index 119ef60..28e0aae 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_app.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_app.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_app). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_db.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_db.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_db.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_db.erl index d00f991..82e06a9 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_db.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_db.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_db). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_event.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_event.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_event.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_event.erl index 677d5f2..6e92816 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_event.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_event.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_event). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange.erl index fa6102c..773e204 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. 
%% %% TODO rename this diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl index 12f5316..88f52b7 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange_link.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_exchange_link). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link_sup_sup.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link_sup_sup.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl index 529edea..ba6da91 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_exchange_link_sup_sup.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_exchange_link_sup_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_exchange_link_sup_sup). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_sup.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_sup.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl index 2999a18..8345eb5 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_sup.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_link_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_link_sup). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_util.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_link_util.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_util.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_link_util.erl index 757331f..817152c 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_link_util.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_link_util.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_link_util). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_parameters.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_parameters.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_parameters.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_parameters.erl index c05f4c0..30549f1 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_parameters.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_parameters.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_parameters). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue.erl index 49c4f40..ec9ec8d 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_queue). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl index e498f76..e6d8019 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue_link.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_queue_link). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link_sup_sup.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link_sup_sup.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl index 9c6a703..3dba50f 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_queue_link_sup_sup.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_queue_link_sup_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_queue_link_sup_sup). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_status.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_status.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_status.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_status.erl index 59dc79e..af6c6c1 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_status.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_status.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_status). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_sup.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_sup.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_sup.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_sup.erl index 52a837d..cfcaeb4 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_sup.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_sup.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_sup). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_upstream.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_upstream.erl index ae4c512..d6223d6 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_upstream.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_upstream). @@ -71,7 +71,9 @@ remove_credentials(URI) -> list_to_binary(amqp_uri:remove_credentials(binary_to_list(URI))). 
to_params(Upstream = #upstream{uris = URIs}, XorQ) -> - random:seed(now()), + random:seed(erlang:phash2([node()]), + time_compat:monotonic_time(), + time_compat:unique_integer()), URI = lists:nth(random:uniform(length(URIs)), URIs), {ok, Params} = amqp_uri:parse(binary_to_list(URI), vhost(XorQ)), XorQ1 = with_name(Upstream, vhost(Params), XorQ), diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream_exchange.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream_exchange.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl index 920bc9f..61aaf6b 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_upstream_exchange.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_upstream_exchange.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_upstream_exchange). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_util.erl b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_util.erl similarity index 97% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_util.erl rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_util.erl index 33e903e..24bc138 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbit_federation_util.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbit_federation_util.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. 
-%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_util). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbitmq_federation.app.src b/rabbitmq-server/deps/rabbitmq_federation/src/rabbitmq_federation.app.src similarity index 92% rename from rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbitmq_federation.app.src rename to rabbitmq-server/deps/rabbitmq_federation/src/rabbitmq_federation.app.src index 0100da3..cae5439 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/src/rabbitmq_federation.app.src +++ b/rabbitmq-server/deps/rabbitmq_federation/src/rabbitmq_federation.app.src @@ -1,6 +1,6 @@ {application, rabbitmq_federation, [{description, "RabbitMQ Federation"}, - {vsn, "%%VSN%%"}, + {vsn, "3.6.1"}, {modules, []}, {registered, []}, {mod, {rabbit_federation_app, []}}, diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_exchange_test.erl b/rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_exchange_test.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_exchange_test.erl rename to rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_exchange_test.erl index cce16f8..60d0966 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_exchange_test.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_exchange_test.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_exchange_test). 
@@ -74,7 +74,7 @@ multiple_uris_test() -> expect_uris([]) -> ok; expect_uris(URIs) -> [Link] = rabbit_federation_status:status(), URI = pget(uri, Link), - kill_only_connection(n("rabbit-test")), + kill_only_connection(n(os:getenv("RABBITMQ_NODENAME"))), expect_uris(URIs -- [URI]). kill_only_connection(Node) -> diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_queue_test.erl b/rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_queue_test.erl similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_queue_test.erl rename to rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_queue_test.erl index d58c0d6..4f3cf5a 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_queue_test.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_queue_test.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_queue_test). diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_test_util.erl b/rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_test_util.erl similarity index 96% rename from rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_test_util.erl rename to rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_test_util.erl index d70042e..75a180b 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_test_util.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_test_util.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. 
-%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_test_util). @@ -114,8 +114,9 @@ no_plugins(Cfg) -> %% "fake" cfg to let us use various utility functions when running %% in-broker tests single_cfg() -> - [{nodename, 'rabbit-test'}, - {node, rabbit_nodes:make('rabbit-test')}, + Nodename = list_to_atom(os:getenv("RABBITMQ_NODENAME")), + [{nodename, Nodename}, + {node, rabbit_nodes:make(Nodename)}, {port, 5672}]. %%---------------------------------------------------------------------------- diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_unit_test.erl b/rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_unit_test.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_unit_test.erl rename to rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_unit_test.erl index 76d23b8..e680623 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/test/src/rabbit_federation_unit_test.erl +++ b/rabbitmq-server/deps/rabbitmq_federation/test/src/rabbit_federation_unit_test.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ Federation. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_unit_test). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/CONTRIBUTING.md b/rabbitmq-server/deps/rabbitmq_federation_management/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-erlang-client/CONTRIBUTING.md rename to rabbitmq-server/deps/rabbitmq_federation_management/CONTRIBUTING.md diff --git a/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE new file mode 100644 index 0000000..b945a8e --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE @@ -0,0 +1,14 @@ +This package, the RabbitMQ Federation Management Plugin is licensed under the MPL. For the +MPL, please see LICENSE-MPL-RabbitMQ. + +This package makes use of the following third party libraries: + +jQuery - http://jquery.com/ - MIT license, see LICENSE-MIT-jQuery164 +EJS - http://embeddedjs.com/ - MIT license, see LICENSE-MIT-EJS10 +Sammy - http://code.quirkey.com/sammy/ - MIT license, see LICENSE-MIT-Sammy060 +webmachine - http://webmachine.basho.com/ - Apache license, 2.0 +mochiweb - http://github.com/mochi/mochiweb/ - MIT license +base64.js - http://code.google.com/p/stringencoders/ - BSD license, see LICENSE-BSD-base64js + +If you have any questions regarding licensing, please contact us at +info@rabbitmq.com. 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-APACHE2-ExplorerCanvas b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-APACHE2-ExplorerCanvas similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-APACHE2-ExplorerCanvas rename to rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-APACHE2-ExplorerCanvas diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-BSD-base64js b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-BSD-base64js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-BSD-base64js rename to rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-BSD-base64js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-EJS10 b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MIT-EJS10 similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-EJS10 rename to rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MIT-EJS10 diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-Flot b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MIT-Flot similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-Flot rename to rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MIT-Flot diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-Sammy060 b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MIT-Sammy060 similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-Sammy060 rename to rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MIT-Sammy060 diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-jQuery164 b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MIT-jQuery164 similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE-MIT-jQuery164 rename to 
rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MIT-jQuery164 diff --git a/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MPL-RabbitMQ b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MPL-RabbitMQ new file mode 100644 index 0000000..e163fcc --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation_management/LICENSE-MPL-RabbitMQ @@ -0,0 +1,455 @@ + MOZILLA PUBLIC LICENSE + Version 1.1 + + --------------- + +1. Definitions. + + 1.0.1. "Commercial Use" means distribution or otherwise making the + Covered Code available to a third party. + + 1.1. "Contributor" means each entity that creates or contributes to + the creation of Modifications. + + 1.2. "Contributor Version" means the combination of the Original + Code, prior Modifications used by a Contributor, and the Modifications + made by that particular Contributor. + + 1.3. "Covered Code" means the Original Code or Modifications or the + combination of the Original Code and Modifications, in each case + including portions thereof. + + 1.4. "Electronic Distribution Mechanism" means a mechanism generally + accepted in the software development community for the electronic + transfer of data. + + 1.5. "Executable" means Covered Code in any form other than Source + Code. + + 1.6. "Initial Developer" means the individual or entity identified + as the Initial Developer in the Source Code notice required by Exhibit + A. + + 1.7. "Larger Work" means a work which combines Covered Code or + portions thereof with code not governed by the terms of this License. + + 1.8. "License" means this document. + + 1.8.1. "Licensable" means having the right to grant, to the maximum + extent possible, whether at the time of the initial grant or + subsequently acquired, any and all of the rights conveyed herein. + + 1.9. "Modifications" means any addition to or deletion from the + substance or structure of either the Original Code or any previous + Modifications. 
When Covered Code is released as a series of files, a + Modification is: + A. Any addition to or deletion from the contents of a file + containing Original Code or previous Modifications. + + B. Any new file that contains any part of the Original Code or + previous Modifications. + + 1.10. "Original Code" means Source Code of computer software code + which is described in the Source Code notice required by Exhibit A as + Original Code, and which, at the time of its release under this + License is not already Covered Code governed by this License. + + 1.10.1. "Patent Claims" means any patent claim(s), now owned or + hereafter acquired, including without limitation, method, process, + and apparatus claims, in any patent Licensable by grantor. + + 1.11. "Source Code" means the preferred form of the Covered Code for + making modifications to it, including all modules it contains, plus + any associated interface definition files, scripts used to control + compilation and installation of an Executable, or source code + differential comparisons against either the Original Code or another + well known, available Covered Code of the Contributor's choice. The + Source Code can be in a compressed or archival form, provided the + appropriate decompression or de-archiving software is widely available + for no charge. + + 1.12. "You" (or "Your") means an individual or a legal entity + exercising rights under, and complying with all of the terms of, this + License or a future version of this License issued under Section 6.1. + For legal entities, "You" includes any entity which controls, is + controlled by, or is under common control with You. For purposes of + this definition, "control" means (a) the power, direct or indirect, + to cause the direction or management of such entity, whether by + contract or otherwise, or (b) ownership of more than fifty percent + (50%) of the outstanding shares or beneficial ownership of such + entity. + +2. Source Code License. + + 2.1. 
The Initial Developer Grant. + The Initial Developer hereby grants You a world-wide, royalty-free, + non-exclusive license, subject to third party intellectual property + claims: + (a) under intellectual property rights (other than patent or + trademark) Licensable by Initial Developer to use, reproduce, + modify, display, perform, sublicense and distribute the Original + Code (or portions thereof) with or without Modifications, and/or + as part of a Larger Work; and + + (b) under Patents Claims infringed by the making, using or + selling of Original Code, to make, have made, use, practice, + sell, and offer for sale, and/or otherwise dispose of the + Original Code (or portions thereof). + + (c) the licenses granted in this Section 2.1(a) and (b) are + effective on the date Initial Developer first distributes + Original Code under the terms of this License. + + (d) Notwithstanding Section 2.1(b) above, no patent license is + granted: 1) for code that You delete from the Original Code; 2) + separate from the Original Code; or 3) for infringements caused + by: i) the modification of the Original Code or ii) the + combination of the Original Code with other software or devices. + + 2.2. Contributor Grant. 
+ Subject to third party intellectual property claims, each Contributor + hereby grants You a world-wide, royalty-free, non-exclusive license + + (a) under intellectual property rights (other than patent or + trademark) Licensable by Contributor, to use, reproduce, modify, + display, perform, sublicense and distribute the Modifications + created by such Contributor (or portions thereof) either on an + unmodified basis, with other Modifications, as Covered Code + and/or as part of a Larger Work; and + + (b) under Patent Claims infringed by the making, using, or + selling of Modifications made by that Contributor either alone + and/or in combination with its Contributor Version (or portions + of such combination), to make, use, sell, offer for sale, have + made, and/or otherwise dispose of: 1) Modifications made by that + Contributor (or portions thereof); and 2) the combination of + Modifications made by that Contributor with its Contributor + Version (or portions of such combination). + + (c) the licenses granted in Sections 2.2(a) and 2.2(b) are + effective on the date Contributor first makes Commercial Use of + the Covered Code. + + (d) Notwithstanding Section 2.2(b) above, no patent license is + granted: 1) for any code that Contributor has deleted from the + Contributor Version; 2) separate from the Contributor Version; + 3) for infringements caused by: i) third party modifications of + Contributor Version or ii) the combination of Modifications made + by that Contributor with other software (except as part of the + Contributor Version) or other devices; or 4) under Patent Claims + infringed by Covered Code in the absence of Modifications made by + that Contributor. + +3. Distribution Obligations. + + 3.1. Application of License. + The Modifications which You create or to which You contribute are + governed by the terms of this License, including without limitation + Section 2.2. 
The Source Code version of Covered Code may be + distributed only under the terms of this License or a future version + of this License released under Section 6.1, and You must include a + copy of this License with every copy of the Source Code You + distribute. You may not offer or impose any terms on any Source Code + version that alters or restricts the applicable version of this + License or the recipients' rights hereunder. However, You may include + an additional document offering the additional rights described in + Section 3.5. + + 3.2. Availability of Source Code. + Any Modification which You create or to which You contribute must be + made available in Source Code form under the terms of this License + either on the same media as an Executable version or via an accepted + Electronic Distribution Mechanism to anyone to whom you made an + Executable version available; and if made available via Electronic + Distribution Mechanism, must remain available for at least twelve (12) + months after the date it initially became available, or at least six + (6) months after a subsequent version of that particular Modification + has been made available to such recipients. You are responsible for + ensuring that the Source Code version remains available even if the + Electronic Distribution Mechanism is maintained by a third party. + + 3.3. Description of Modifications. + You must cause all Covered Code to which You contribute to contain a + file documenting the changes You made to create that Covered Code and + the date of any change. You must include a prominent statement that + the Modification is derived, directly or indirectly, from Original + Code provided by the Initial Developer and including the name of the + Initial Developer in (a) the Source Code, and (b) in any notice in an + Executable version or related documentation in which You describe the + origin or ownership of the Covered Code. + + 3.4. Intellectual Property Matters + (a) Third Party Claims. 
+ If Contributor has knowledge that a license under a third party's + intellectual property rights is required to exercise the rights + granted by such Contributor under Sections 2.1 or 2.2, + Contributor must include a text file with the Source Code + distribution titled "LEGAL" which describes the claim and the + party making the claim in sufficient detail that a recipient will + know whom to contact. If Contributor obtains such knowledge after + the Modification is made available as described in Section 3.2, + Contributor shall promptly modify the LEGAL file in all copies + Contributor makes available thereafter and shall take other steps + (such as notifying appropriate mailing lists or newsgroups) + reasonably calculated to inform those who received the Covered + Code that new knowledge has been obtained. + + (b) Contributor APIs. + If Contributor's Modifications include an application programming + interface and Contributor has knowledge of patent licenses which + are reasonably necessary to implement that API, Contributor must + also include this information in the LEGAL file. + + (c) Representations. + Contributor represents that, except as disclosed pursuant to + Section 3.4(a) above, Contributor believes that Contributor's + Modifications are Contributor's original creation(s) and/or + Contributor has sufficient rights to grant the rights conveyed by + this License. + + 3.5. Required Notices. + You must duplicate the notice in Exhibit A in each file of the Source + Code. If it is not possible to put such notice in a particular Source + Code file due to its structure, then You must include such notice in a + location (such as a relevant directory) where a user would be likely + to look for such a notice. If You created one or more Modification(s) + You may add your name as a Contributor to the notice described in + Exhibit A. 
You must also duplicate this License in any documentation + for the Source Code where You describe recipients' rights or ownership + rights relating to Covered Code. You may choose to offer, and to + charge a fee for, warranty, support, indemnity or liability + obligations to one or more recipients of Covered Code. However, You + may do so only on Your own behalf, and not on behalf of the Initial + Developer or any Contributor. You must make it absolutely clear than + any such warranty, support, indemnity or liability obligation is + offered by You alone, and You hereby agree to indemnify the Initial + Developer and every Contributor for any liability incurred by the + Initial Developer or such Contributor as a result of warranty, + support, indemnity or liability terms You offer. + + 3.6. Distribution of Executable Versions. + You may distribute Covered Code in Executable form only if the + requirements of Section 3.1-3.5 have been met for that Covered Code, + and if You include a notice stating that the Source Code version of + the Covered Code is available under the terms of this License, + including a description of how and where You have fulfilled the + obligations of Section 3.2. The notice must be conspicuously included + in any notice in an Executable version, related documentation or + collateral in which You describe recipients' rights relating to the + Covered Code. You may distribute the Executable version of Covered + Code or ownership rights under a license of Your choice, which may + contain terms different from this License, provided that You are in + compliance with the terms of this License and that the license for the + Executable version does not attempt to limit or alter the recipient's + rights in the Source Code version from the rights set forth in this + License. 
If You distribute the Executable version under a different + license You must make it absolutely clear that any terms which differ + from this License are offered by You alone, not by the Initial + Developer or any Contributor. You hereby agree to indemnify the + Initial Developer and every Contributor for any liability incurred by + the Initial Developer or such Contributor as a result of any such + terms You offer. + + 3.7. Larger Works. + You may create a Larger Work by combining Covered Code with other code + not governed by the terms of this License and distribute the Larger + Work as a single product. In such a case, You must make sure the + requirements of this License are fulfilled for the Covered Code. + +4. Inability to Comply Due to Statute or Regulation. + + If it is impossible for You to comply with any of the terms of this + License with respect to some or all of the Covered Code due to + statute, judicial order, or regulation then You must: (a) comply with + the terms of this License to the maximum extent possible; and (b) + describe the limitations and the code they affect. Such description + must be included in the LEGAL file described in Section 3.4 and must + be included with all distributions of the Source Code. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Application of this License. + + This License applies to code to which the Initial Developer has + attached the notice in Exhibit A and to related Covered Code. + +6. Versions of the License. + + 6.1. New Versions. + Netscape Communications Corporation ("Netscape") may publish revised + and/or new versions of the License from time to time. Each version + will be given a distinguishing version number. + + 6.2. Effect of New Versions. 
+ Once Covered Code has been published under a particular version of the + License, You may always continue to use it under the terms of that + version. You may also choose to use such Covered Code under the terms + of any subsequent version of the License published by Netscape. No one + other than Netscape has the right to modify the terms applicable to + Covered Code created under this License. + + 6.3. Derivative Works. + If You create or use a modified version of this License (which you may + only do in order to apply it to code which is not already Covered Code + governed by this License), You must (a) rename Your license so that + the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape", + "MPL", "NPL" or any confusingly similar phrase do not appear in your + license (except to note that your license differs from this License) + and (b) otherwise make it clear that Your version of the license + contains terms which differ from the Mozilla Public License and + Netscape Public License. (Filling in the name of the Initial + Developer, Original Code or Contributor in the notice described in + Exhibit A shall not of themselves be deemed to be modifications of + this License.) + +7. DISCLAIMER OF WARRANTY. + + COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, + WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, + WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF + DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. + THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE + IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT, + YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE + COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER + OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF + ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +8. TERMINATION. + + 8.1. 
This License and the rights granted hereunder will terminate + automatically if You fail to comply with terms herein and fail to cure + such breach within 30 days of becoming aware of the breach. All + sublicenses to the Covered Code which are properly granted shall + survive any termination of this License. Provisions which, by their + nature, must remain in effect beyond the termination of this License + shall survive. + + 8.2. If You initiate litigation by asserting a patent infringement + claim (excluding declatory judgment actions) against Initial Developer + or a Contributor (the Initial Developer or Contributor against whom + You file such action is referred to as "Participant") alleging that: + + (a) such Participant's Contributor Version directly or indirectly + infringes any patent, then any and all rights granted by such + Participant to You under Sections 2.1 and/or 2.2 of this License + shall, upon 60 days notice from Participant terminate prospectively, + unless if within 60 days after receipt of notice You either: (i) + agree in writing to pay Participant a mutually agreeable reasonable + royalty for Your past and future use of Modifications made by such + Participant, or (ii) withdraw Your litigation claim with respect to + the Contributor Version against such Participant. If within 60 days + of notice, a reasonable royalty and payment arrangement are not + mutually agreed upon in writing by the parties or the litigation claim + is not withdrawn, the rights granted by Participant to You under + Sections 2.1 and/or 2.2 automatically terminate at the expiration of + the 60 day notice period specified above. 
+ + (b) any software, hardware, or device, other than such Participant's + Contributor Version, directly or indirectly infringes any patent, then + any rights granted to You by such Participant under Sections 2.1(b) + and 2.2(b) are revoked effective as of the date You first made, used, + sold, distributed, or had made, Modifications made by that + Participant. + + 8.3. If You assert a patent infringement claim against Participant + alleging that such Participant's Contributor Version directly or + indirectly infringes any patent where such claim is resolved (such as + by license or settlement) prior to the initiation of patent + infringement litigation, then the reasonable value of the licenses + granted by such Participant under Sections 2.1 or 2.2 shall be taken + into account in determining the amount or value of any payment or + license. + + 8.4. In the event of termination under Sections 8.1 or 8.2 above, + all end user license agreements (excluding distributors and resellers) + which have been validly granted by You or any distributor hereunder + prior to termination shall survive termination. + +9. LIMITATION OF LIABILITY. + + UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT + (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL + DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE, + OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR + ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY + CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL, + WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER + COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN + INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF + LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY + RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW + PROHIBITS SUCH LIMITATION. 
SOME JURISDICTIONS DO NOT ALLOW THE + EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO + THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. + +10. U.S. GOVERNMENT END USERS. + + The Covered Code is a "commercial item," as that term is defined in + 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer + software" and "commercial computer software documentation," as such + terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 + C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), + all U.S. Government End Users acquire Covered Code with only those + rights set forth herein. + +11. MISCELLANEOUS. + + This License represents the complete agreement concerning subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. This License shall be governed by + California law provisions (except to the extent applicable law, if + any, provides otherwise), excluding its conflict-of-law provisions. + With respect to disputes in which at least one party is a citizen of, + or an entity chartered or registered to do business in the United + States of America, any litigation relating to this License shall be + subject to the jurisdiction of the Federal Courts of the Northern + District of California, with venue lying in Santa Clara County, + California, with the losing party responsible for costs, including + without limitation, court costs and reasonable attorneys' fees and + expenses. The application of the United Nations Convention on + Contracts for the International Sale of Goods is expressly excluded. + Any law or regulation which provides that the language of a contract + shall be construed against the drafter shall not apply to this + License. + +12. RESPONSIBILITY FOR CLAIMS. 
+ + As between Initial Developer and the Contributors, each party is + responsible for claims and damages arising, directly or indirectly, + out of its utilization of rights under this License and You agree to + work with Initial Developer and Contributors to distribute such + responsibility on an equitable basis. Nothing herein is intended or + shall be deemed to constitute any admission of liability. + +13. MULTIPLE-LICENSED CODE. + + Initial Developer may designate portions of the Covered Code as + "Multiple-Licensed". "Multiple-Licensed" means that the Initial + Developer permits you to utilize portions of the Covered Code under + Your choice of the NPL or the alternative licenses, if any, specified + by the Initial Developer in the file described in Exhibit A. + +EXHIBIT A -Mozilla Public License. + + ``The contents of this file are subject to the Mozilla Public License + Version 1.1 (the "License"); you may not use this file except in + compliance with the License. You may obtain a copy of the License at + http://www.mozilla.org/MPL/ + + Software distributed under the License is distributed on an "AS IS" + basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the + License for the specific language governing rights and limitations + under the License. + + The Original Code is RabbitMQ Management Plugin. + + The Initial Developer of the Original Code is GoPivotal, Inc. + Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved.'' + + [NOTE: The text of this Exhibit A may differ slightly from the text of + the notices in the Source Code files of the Original Code. You should + use the text of this Exhibit A rather than the text found in the + Original Code Source Code for Your Modifications.] 
diff --git a/rabbitmq-server/deps/rabbitmq_federation_management/Makefile b/rabbitmq-server/deps/rabbitmq_federation_management/Makefile new file mode 100644 index 0000000..2a9fced --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation_management/Makefile @@ -0,0 +1,14 @@ +PROJECT = rabbitmq_federation_management + +DEPS = rabbitmq_management webmachine + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk diff --git a/rabbitmq-server/deps/rabbitmq_federation_management/README.md b/rabbitmq-server/deps/rabbitmq_federation_management/README.md new file mode 100644 index 0000000..61e0075 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation_management/README.md @@ -0,0 +1,38 @@ +# RabbitMQ Federation Management Plugin + +This plugin adds information on federation link status to the management +plugin. + + +## Installation + +In recent releases, this plugin ships with RabbitMQ. Enable it with + + rabbitmq-plugins enable rabbitmq_management rabbitmq_federation_management + +If you have a heterogeneous cluster (where the nodes have different +plugins installed), this should be installed on the same nodes as the +management plugin. + + +## Use over HTTP API + +The HTTP API provided is tiny: + + GET /api/federation-links + + +## Building From Source + +To [build the plugin](http://www.rabbitmq.com/plugin-development.html), use + + make dist + +and see under the `./plugins` directory. + + +## Copyright and License + +(c) Pivotal Software Inc, 2007-2016. + +See `LICENSE` for license information. 
diff --git a/rabbitmq-server/deps/rabbitmq_federation_management/erlang.mk b/rabbitmq-server/deps/rabbitmq_federation_management/erlang.mk new file mode 100644 index 0000000..1688ee8 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation_management/erlang.mk @@ -0,0 +1,6640 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. 
+ +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... 
Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). 
+endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. 
+ +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch 
= git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. 
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += 
bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simply writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - an Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2014, Enrique Fernandez +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: eunit + +# Configuration + +EUNIT_OPTS ?= + +# Core targets. + +tests:: eunit + +help:: + $(verbose) printf "%s\n" "" \ + "EUnit targets:" \ + " eunit Run all the EUnit tests for this project" + +# Plugin-specific targets. 
+ +define eunit.erl + case "$(COVER)" of + "" -> ok; + _ -> + case cover:compile_beam_directory("ebin") of + {error, _} -> halt(1); + _ -> ok + end + end, + case eunit:test([$(call comma_list,$(1))], [$(EUNIT_OPTS)]) of + ok -> ok; + error -> halt(2) + end, + case "$(COVER)" of + "" -> ok; + _ -> + cover:export("eunit.coverdata") + end, + halt() +endef + +EUNIT_EBIN_MODS = $(notdir $(basename $(call core_find,ebin/,*.beam))) +EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.beam))) +EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \ + $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),{module,'$(mod)'}) + +eunit: test-build + $(gen_verbose) $(ERL) -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin ebin \ + -eval "$(subst $(newline),,$(subst ",\",$(call eunit.erl,$(EUNIT_MODS))))" + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. + +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. 
+ +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. + +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. 
+endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' 
> $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. +ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. 
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/federation.js b/rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/federation.js similarity index 90% rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/federation.js rename to rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/federation.js index c989072..a5f74b4 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/federation.js +++ b/rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/federation.js @@ -37,21 +37,14 @@ NAVIGATION['Admin'][0]['Federation Upstreams'] = ['#/federation-upstreams', "pol HELP['federation-uri'] = 'URI to connect to. If upstream is a cluster and can have several URIs, you can enter them here separated by spaces.'; -HELP['federation-expires'] = - 'Time in milliseconds that the upstream should remember about this node for. After this time all upstream state will be removed. Leave this blank to mean "forever".'; - -HELP['federation-ttl'] = - 'Time in milliseconds that undelivered messages should be held upstream when there is a network outage or backlog. Leave this blank to mean "forever".'; - -HELP['federation-max-hops'] = - 'Maximum number of federation links that messages can traverse before being dropped. 
Defaults to 1 if not set.'; - HELP['federation-prefetch'] = 'Maximum number of unacknowledged messages that may be in flight over a federation link at one time. Defaults to 1000 if not set.'; + HELP['federation-reconnect'] = 'Time in seconds to wait after a network link goes down before attempting reconnection. Defaults to 5 if not set.'; + HELP['federation-ack-mode'] = '
\
on-confirm
\ @@ -65,6 +58,25 @@ HELP['federation-ack-mode'] = HELP['federation-trust-user-id'] = 'Set "Yes" to preserve the "user-id" field across a federation link, even if the user-id does not match that used to republish the message. Set to "No" to clear the "user-id" field when messages are federated. Only set this to "Yes" if you trust the upstream broker not to forge user-ids.'; +HELP['exchange'] = + 'The name of the upstream exchange. Default is to use the same name as the federated exchange.'; + +HELP['federation-max-hops'] = + 'Maximum number of federation links that messages can traverse before being dropped. Defaults to 1 if not set.'; + +HELP['federation-expires'] = + 'Time in milliseconds that the upstream should remember about this node for. After this time all upstream state will be removed. Leave this blank to mean "forever".'; + +HELP['federation-ttl'] = + 'Time in milliseconds that undelivered messages should be held upstream when there is a network outage or backlog. Leave this blank to mean "forever".'; + +HELP['ha-policy'] = + 'Determines the "x-ha-policy" argument for the upstream queue for a federated exchange. Default is "none", meaning the queue is not HA.'; + +HELP['queue'] = + 'The name of the upstream queue. 
Default is to use the same name as the federated queue.'; + + function link_fed_conn(vhost, name) { return _link_to(name, '#/federation-upstreams/' + esc(vhost) + '/' + esc(name)); } diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstream.ejs b/rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstream.ejs similarity index 72% rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstream.ejs rename to rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstream.ejs index 6e66b4e..da1331b 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstream.ejs +++ b/rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstream.ejs @@ -4,22 +4,14 @@

Overview

+ + - - - - - - - - - - - - @@ -36,6 +28,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+

General parameters

+
URI <%= fmt_string(upstream.value.uri) %>
Expires<%= fmt_time(upstream.value.expires, 'ms') %>
Message TTL<%= fmt_time(upstream.value['message-ttl'], 'ms') %>
Max Hops<%= fmt_string(upstream.value['max-hops']) %>
Prefetch Count <%= fmt_string(upstream.value['prefetch-count']) %>Trust User-ID <%= fmt_boolean(upstream.value['trust-user-id']) %>
+

Federated exchange parameters

+
Exchange<%= fmt_string(upstream.value['exchange']) %>
Max Hops<%= fmt_string(upstream.value['max-hops']) %>
Expires<%= fmt_time(upstream.value.expires, 'ms') %>
Message TTL<%= fmt_time(upstream.value['message-ttl'], 'ms') %>
HA Policy<%= fmt_string(upstream.value['ha-policy']) %>
+

Federated queue parameters

+
Queue<%= fmt_string(upstream.value['queue']) %>
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstreams.ejs b/rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs similarity index 81% rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstreams.ejs rename to rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs index ff23d38..dc23417 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation-upstreams.ejs +++ b/rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation-upstreams.ejs @@ -11,13 +11,18 @@ <% } %> Name URI - Expiry - Message TTL - Max Hops Prefetch Count Reconnect Delay Ack mode Trust User-ID + Exchange + Max Hops + Expiry + Message TTL + HA Policy + Queue + + @@ -31,13 +36,16 @@ <% } %> <%= link_fed_conn(upstream.vhost, upstream.name) %> <%= fmt_shortened_uri(upstream.value.uri) %> - <%= fmt_time(upstream.value.expires, 'ms') %> - <%= fmt_time(upstream.value['message-ttl'], 'ms') %> - <%= upstream.value['max-hops'] %> <%= upstream.value['prefetch-count'] %> <%= fmt_time(upstream.value['reconnect-delay'], 's') %> <%= fmt_string(upstream.value['ack-mode']) %> <%= fmt_boolean(upstream.value['trust-user-id']) %> + <%= fmt_string(upstream.value['exchange']) %> + <%= upstream.value['max-hops'] %> + <%= fmt_time(upstream.value.expires, 'ms') %> + <%= fmt_time(upstream.value['message-ttl'], 'ms') %> + <%= upstream.value['ha-policy'] %> + <%= upstream.value['queue'] %> <% } %> @@ -54,6 +62,11 @@
+ + + <% if (vhosts_interesting) { %> @@ -72,6 +85,8 @@ + + - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+

General parameters

+
*
*
- - ms
- - ms
- -
+

Federated exchanges parameters

+
+ +
+ +
+ + ms
+ + ms
+ +
+

Federated queues parameter

+
+ +
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation.ejs b/rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation.ejs similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/priv/www/js/tmpl/federation.ejs rename to rabbitmq-server/deps/rabbitmq_federation_management/priv/www/js/tmpl/federation.ejs diff --git a/rabbitmq-server/deps/rabbitmq_federation_management/rabbitmq-components.mk b/rabbitmq-server/deps/rabbitmq_federation_management/rabbitmq-components.mk new file mode 100644 index 0000000..eed26fd --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_federation_management/rabbitmq-components.mk @@ -0,0 +1,331 @@ +ifeq ($(.DEFAULT_GOAL),) +# Define default goal to `all` because this file defines some targets +# before the inclusion of erlang.mk leading to the wrong target becoming +# the default. +.DEFAULT_GOAL = all +endif + +# Automatically add rabbitmq-common to the dependencies, at least for +# the Makefiles. +ifneq ($(PROJECT),rabbit_common) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +ifeq ($(filter rabbit_common,$(DEPS)),) +DEPS += rabbit_common +endif +endif +endif + +# -------------------------------------------------------------------- +# RabbitMQ components. +# -------------------------------------------------------------------- + +# For RabbitMQ repositories, we want to checkout branches which match +# the parent project. For instance, if the parent project is on a +# release tag, dependencies must be on the same release tag. If the +# parent project is on a topic branch, dependencies must be on the same +# topic branch or fallback to `stable` or `master` whichever was the +# base of the topic branch. 
+ +dep_amqp_client = git_rmq rabbitmq-erlang-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit = git_rmq rabbitmq-server $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbit_common = git_rmq rabbitmq-common $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_amqp1_0 = git_rmq rabbitmq-amqp1.0 $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_amqp = git_rmq rabbitmq-auth-backend-amqp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_http = git_rmq rabbitmq-auth-backend-http $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_backend_ldap = git_rmq rabbitmq-auth-backend-ldap $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_auth_mechanism_ssl = git_rmq rabbitmq-auth-mechanism-ssl $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_boot_steps_visualiser = git_rmq rabbitmq-boot-steps-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_clusterer = git_rmq rabbitmq-clusterer $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_codegen = git_rmq rabbitmq-codegen $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_consistent_hash_exchange = git_rmq rabbitmq-consistent-hash-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_delayed_message_exchange = git_rmq rabbitmq-delayed-message-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_dotnet_client = git_rmq rabbitmq-dotnet-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_event_exchange = git_rmq rabbitmq-event-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation = git_rmq rabbitmq-federation $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_federation_management = git_rmq rabbitmq-federation-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_java_client = git_rmq rabbitmq-java-client $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_lvc = git_rmq rabbitmq-lvc-plugin $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management = 
git_rmq rabbitmq-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_agent = git_rmq rabbitmq-management-agent $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_exchange = git_rmq rabbitmq-management-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_themes = git_rmq rabbitmq-management-themes $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_management_visualiser = git_rmq rabbitmq-management-visualiser $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_message_timestamp = git_rmq rabbitmq-message-timestamp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_metronome = git_rmq rabbitmq-metronome $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_mqtt = git_rmq rabbitmq-mqtt $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_recent_history_exchange = git_rmq rabbitmq-recent-history-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_rtopic_exchange = git_rmq rabbitmq-rtopic-exchange $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_sharding = git_rmq rabbitmq-sharding $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel = git_rmq rabbitmq-shovel $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_shovel_management = git_rmq rabbitmq-shovel-management $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_stomp = git_rmq rabbitmq-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_toke = git_rmq rabbitmq-toke $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_top = git_rmq rabbitmq-top $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_tracing = git_rmq rabbitmq-tracing $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_test = git_rmq rabbitmq-test $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_dispatch = git_rmq rabbitmq-web-dispatch $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp = git_rmq rabbitmq-web-stomp $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_web_stomp_examples = 
git_rmq rabbitmq-web-stomp-examples $(current_rmq_ref) $(base_rmq_ref) master +dep_rabbitmq_website = git_rmq rabbitmq-website $(current_rmq_ref) $(base_rmq_ref) live master +dep_sockjs = git_rmq sockjs-erlang $(current_rmq_ref) $(base_rmq_ref) master +dep_toke = git_rmq toke $(current_rmq_ref) $(base_rmq_ref) master + +dep_rabbitmq_public_umbrella = git_rmq rabbitmq-public-umbrella $(current_rmq_ref) $(base_rmq_ref) master + +# FIXME: As of 2015-11-20, we depend on Ranch 1.2.1, but erlang.mk +# defaults to Ranch 1.1.0. All projects depending indirectly on Ranch +# needs to add "ranch" as a BUILD_DEPS. The list of projects needing +# this workaround are: +# o rabbitmq-web-stomp +dep_ranch = git https://github.com/ninenines/ranch 1.2.1 + +RABBITMQ_COMPONENTS = amqp_client \ + rabbit \ + rabbit_common \ + rabbitmq_amqp1_0 \ + rabbitmq_auth_backend_amqp \ + rabbitmq_auth_backend_http \ + rabbitmq_auth_backend_ldap \ + rabbitmq_auth_mechanism_ssl \ + rabbitmq_boot_steps_visualiser \ + rabbitmq_clusterer \ + rabbitmq_codegen \ + rabbitmq_consistent_hash_exchange \ + rabbitmq_delayed_message_exchange \ + rabbitmq_dotnet_client \ + rabbitmq_event_exchange \ + rabbitmq_federation \ + rabbitmq_federation_management \ + rabbitmq_java_client \ + rabbitmq_lvc \ + rabbitmq_management \ + rabbitmq_management_agent \ + rabbitmq_management_exchange \ + rabbitmq_management_themes \ + rabbitmq_management_visualiser \ + rabbitmq_message_timestamp \ + rabbitmq_metronome \ + rabbitmq_mqtt \ + rabbitmq_recent_history_exchange \ + rabbitmq_rtopic_exchange \ + rabbitmq_sharding \ + rabbitmq_shovel \ + rabbitmq_shovel_management \ + rabbitmq_stomp \ + rabbitmq_test \ + rabbitmq_toke \ + rabbitmq_top \ + rabbitmq_tracing \ + rabbitmq_web_dispatch \ + rabbitmq_web_stomp \ + rabbitmq_web_stomp_examples \ + rabbitmq_website + +# Several components have a custom erlang.mk/build.config, mainly +# to disable eunit. Therefore, we can't use the top-level project's +# erlang.mk copy. 
+NO_AUTOPATCH += $(RABBITMQ_COMPONENTS) + +ifeq ($(origin current_rmq_ref),undefined) +ifneq ($(wildcard .git),) +current_rmq_ref := $(shell (\ + ref=$$(git branch --list | awk '/^\* \(.*detached / {ref=$$0; sub(/.*detached [^ ]+ /, "", ref); sub(/\)$$/, "", ref); print ref; exit;} /^\* / {ref=$$0; sub(/^\* /, "", ref); print ref; exit}');\ + if test "$$(git rev-parse --short HEAD)" != "$$ref"; then echo "$$ref"; fi)) +else +current_rmq_ref := master +endif +endif +export current_rmq_ref + +ifeq ($(origin base_rmq_ref),undefined) +ifneq ($(wildcard .git),) +base_rmq_ref := $(shell \ + (git rev-parse --verify -q stable >/dev/null && \ + git merge-base --is-ancestor $$(git merge-base master HEAD) stable && \ + echo stable) || \ + echo master) +else +base_rmq_ref := master +endif +endif +export base_rmq_ref + +# Repository URL selection. +# +# First, we infer other components' location from the current project +# repository URL, if it's a Git repository: +# - We take the "origin" remote URL as the base +# - The current project name and repository name is replaced by the +# target's properties: +# eg. rabbitmq-common is replaced by rabbitmq-codegen +# eg. rabbit_common is replaced by rabbitmq_codegen +# +# If cloning from this computed location fails, we fallback to RabbitMQ +# upstream which is GitHub. + +# Maccro to transform eg. "rabbit_common" to "rabbitmq-common". +rmq_cmp_repo_name = $(word 2,$(dep_$(1))) + +# Upstream URL for the current project. +RABBITMQ_COMPONENT_REPO_NAME := $(call rmq_cmp_repo_name,$(PROJECT)) +RABBITMQ_UPSTREAM_FETCH_URL ?= https://github.com/rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git +RABBITMQ_UPSTREAM_PUSH_URL ?= git@github.com:rabbitmq/$(RABBITMQ_COMPONENT_REPO_NAME).git + +# Current URL for the current project. If this is not a Git clone, +# default to the upstream Git repository. 
+ifneq ($(wildcard .git),) +git_origin_fetch_url := $(shell git config remote.origin.url) +git_origin_push_url := $(shell git config remote.origin.pushurl || git config remote.origin.url) +RABBITMQ_CURRENT_FETCH_URL ?= $(git_origin_fetch_url) +RABBITMQ_CURRENT_PUSH_URL ?= $(git_origin_push_url) +else +RABBITMQ_CURRENT_FETCH_URL ?= $(RABBITMQ_UPSTREAM_FETCH_URL) +RABBITMQ_CURRENT_PUSH_URL ?= $(RABBITMQ_UPSTREAM_PUSH_URL) +endif + +# Macro to replace the following pattern: +# 1. /foo.git -> /bar.git +# 2. /foo -> /bar +# 3. /foo/ -> /bar/ +subst_repo_name = $(patsubst %/$(1)/%,%/$(2)/%,$(patsubst %/$(1),%/$(2),$(patsubst %/$(1).git,%/$(2).git,$(3)))) + +# Macro to replace both the project's name (eg. "rabbit_common") and +# repository name (eg. "rabbitmq-common") by the target's equivalent. +# +# This macro is kept on one line because we don't want whitespaces in +# the returned value, as it's used in $(dep_fetch_git_rmq) in a shell +# single-quoted string. +dep_rmq_repo = $(if $(dep_$(2)),$(call subst_repo_name,$(PROJECT),$(2),$(call subst_repo_name,$(RABBITMQ_COMPONENT_REPO_NAME),$(call rmq_cmp_repo_name,$(2)),$(1))),$(pkg_$(1)_repo)) + +dep_rmq_commits = $(if $(dep_$(1)), \ + $(wordlist 3,$(words $(dep_$(1))),$(dep_$(1))), \ + $(pkg_$(1)_commit)) + +define dep_fetch_git_rmq + fetch_url1='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_FETCH_URL),$(1))'; \ + fetch_url2='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_FETCH_URL),$(1))'; \ + if test "$$$$fetch_url1" != '$(RABBITMQ_CURRENT_FETCH_URL)' && \ + git clone -q -n -- "$$$$fetch_url1" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url1"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_CURRENT_PUSH_URL),$(1))'; \ + elif git clone -q -n -- "$$$$fetch_url2" $(DEPS_DIR)/$(call dep_name,$(1)); then \ + fetch_url="$$$$fetch_url2"; \ + push_url='$(call dep_rmq_repo,$(RABBITMQ_UPSTREAM_PUSH_URL),$(1))'; \ + fi; \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && ( \ + $(foreach ref,$(call dep_rmq_commits,$(1)), \ + git 
checkout -q $(ref) >/dev/null 2>&1 || \ + ) \ + (echo "error: no valid pathspec among: $(call dep_rmq_commits,$(1))" \ + 1>&2 && false) ) && \ + (test "$$$$fetch_url" = "$$$$push_url" || \ + git remote set-url --push origin "$$$$push_url") +endef + +# -------------------------------------------------------------------- +# Component distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @: + +prepare-dist:: + @: + +# -------------------------------------------------------------------- +# Run a RabbitMQ node (moved from rabbitmq-run.mk as a workaround). +# -------------------------------------------------------------------- + +# Add "rabbit" to the build dependencies when the user wants to start +# a broker or to the test dependencies when the user wants to test a +# project. +# +# NOTE: This should belong to rabbitmq-run.mk. Unfortunately, it is +# loaded *after* erlang.mk which is too late to add a dependency. That's +# why rabbitmq-components.mk knows the list of targets which start a +# broker and add "rabbit" to the dependencies in this case. + +ifneq ($(PROJECT),rabbit) +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS)),) +RUN_RMQ_TARGETS = run-broker \ + run-background-broker \ + run-node \ + run-background-node \ + start-background-node + +ifneq ($(filter $(RUN_RMQ_TARGETS),$(MAKECMDGOALS)),) +BUILD_DEPS += rabbit +endif +endif + +ifeq ($(filter rabbit,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +ifneq ($(filter check tests tests-with-broker test,$(MAKECMDGOALS)),) +TEST_DEPS += rabbit +endif +endif +endif + +ifeq ($(filter rabbit_public_umbrella amqp_client rabbit_common rabbitmq_test,$(PROJECT)),) +ifeq ($(filter rabbitmq_test,$(DEPS) $(BUILD_DEPS) $(TEST_DEPS)),) +TEST_DEPS += rabbitmq_test +endif +endif + +# -------------------------------------------------------------------- +# rabbitmq-components.mk checks. 
+# -------------------------------------------------------------------- + +ifeq ($(PROJECT),rabbit_common) +else ifdef SKIP_RMQCOMP_CHECK +else ifeq ($(IS_DEP),1) +else ifneq ($(filter co up,$(MAKECMDGOALS)),) +else +# In all other cases, rabbitmq-components.mk must be in sync. +deps:: check-rabbitmq-components.mk +fetch-deps: check-rabbitmq-components.mk +endif + +# If this project is under the Umbrella project, we override $(DEPS_DIR) +# to point to the Umbrella's one. We also disable `make distclean` so +# $(DEPS_DIR) is not accidentally removed. + +ifneq ($(wildcard ../../UMBRELLA.md),) +UNDER_UMBRELLA = 1 +else ifneq ($(wildcard UMBRELLA.md),) +UNDER_UMBRELLA = 1 +endif + +ifeq ($(UNDER_UMBRELLA),1) +ifneq ($(PROJECT),rabbitmq_public_umbrella) +DEPS_DIR ?= $(abspath ..) + +distclean:: distclean-components + @: + +distclean-components: +endif + +ifneq ($(filter distclean distclean-deps,$(MAKECMDGOALS)),) +SKIP_DEPS = 1 +endif +endif + +UPSTREAM_RMQ_COMPONENTS_MK = $(DEPS_DIR)/rabbit_common/mk/rabbitmq-components.mk + +check-rabbitmq-components.mk: + $(verbose) cmp -s rabbitmq-components.mk \ + $(UPSTREAM_RMQ_COMPONENTS_MK) || \ + (echo "error: rabbitmq-components.mk must be updated!" 1>&2; \ + false) + +ifeq ($(PROJECT),rabbit_common) +rabbitmq-components-mk: + @: +else +rabbitmq-components-mk: + $(gen_verbose) cp -a $(UPSTREAM_RMQ_COMPONENTS_MK) . 
+ifeq ($(DO_COMMIT),yes) + $(verbose) git diff --quiet rabbitmq-components.mk \ + || git commit -m 'Update rabbitmq-components.mk' rabbitmq-components.mk +endif +endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbit_federation_mgmt.erl b/rabbitmq-server/deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbit_federation_mgmt.erl rename to rabbitmq-server/deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl index 10dde01..b4dc5f5 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbit_federation_mgmt.erl +++ b/rabbitmq-server/deps/rabbitmq_federation_management/src/rabbit_federation_mgmt.erl @@ -11,7 +11,7 @@ %% The Original Code is RabbitMQ. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% -module(rabbit_federation_mgmt). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbitmq_federation_management.app.src b/rabbitmq-server/deps/rabbitmq_federation_management/src/rabbitmq_federation_management.app.src similarity index 91% rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbitmq_federation_management.app.src rename to rabbitmq-server/deps/rabbitmq_federation_management/src/rabbitmq_federation_management.app.src index e8374dc..f3b5c91 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-federation-management/src/rabbitmq_federation_management.app.src +++ b/rabbitmq-server/deps/rabbitmq_federation_management/src/rabbitmq_federation_management.app.src @@ -1,6 +1,6 @@ {application, rabbitmq_federation_management, [{description, "RabbitMQ Federation Management"}, - {vsn, "%%VSN%%"}, + {vsn, "3.6.1"}, {modules, []}, {registered, []}, {env, []}, diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/CONTRIBUTING.md b/rabbitmq-server/deps/rabbitmq_management/CONTRIBUTING.md similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-federation-management/CONTRIBUTING.md rename to rabbitmq-server/deps/rabbitmq_management/CONTRIBUTING.md diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/LICENSE b/rabbitmq-server/deps/rabbitmq_management/LICENSE similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/LICENSE rename to rabbitmq-server/deps/rabbitmq_management/LICENSE diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/LICENSE-Apache-Basho b/rabbitmq-server/deps/rabbitmq_management/LICENSE-APACHE2-ExplorerCanvas similarity index 89% rename from rabbitmq-server/plugins-src/webmachine-wrapper/LICENSE-Apache-Basho rename to rabbitmq-server/deps/rabbitmq_management/LICENSE-APACHE2-ExplorerCanvas index e454a52..d645695 100644 --- a/rabbitmq-server/plugins-src/webmachine-wrapper/LICENSE-Apache-Basho +++ b/rabbitmq-server/deps/rabbitmq_management/LICENSE-APACHE2-ExplorerCanvas @@ 
-176,3 +176,27 @@ END OF TERMS AND CONDITIONS + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/rabbitmq-server/deps/rabbitmq_management/LICENSE-BSD-base64js b/rabbitmq-server/deps/rabbitmq_management/LICENSE-BSD-base64js new file mode 100644 index 0000000..7073704 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/LICENSE-BSD-base64js @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2010 Nick Galbreath + * http://code.google.com/p/stringencoders/source/browse/#svn/trunk/javascript + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + * OTHER DEALINGS IN THE SOFTWARE. 
+*/ diff --git a/rabbitmq-server/LICENSE-MIT-eldap b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-EJS10 similarity index 94% rename from rabbitmq-server/LICENSE-MIT-eldap rename to rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-EJS10 index 1f62009..f3bdcd8 100644 --- a/rabbitmq-server/LICENSE-MIT-eldap +++ b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-EJS10 @@ -1,6 +1,7 @@ +EJS - Embedded JavaScript + +Copyright (c) 2007 Edward Benson -Copyright (c) 2010, Torbjorn Tornkvist - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights @@ -19,3 +20,4 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-Flot b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-Flot new file mode 100644 index 0000000..67f4625 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-Flot @@ -0,0 +1,22 @@ +Copyright (c) 2007-2013 IOLA and Ole Laursen + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-Sammy060 b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-Sammy060 new file mode 100644 index 0000000..3debf5a --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-Sammy060 @@ -0,0 +1,25 @@ +Copyright (c) 2008 Aaron Quint, Quirkey NYC, LLC + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+ + + diff --git a/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-jQuery164 b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-jQuery164 new file mode 100644 index 0000000..5a87162 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MIT-jQuery164 @@ -0,0 +1,21 @@ +Copyright (c) 2011 John Resig, http://jquery.com/ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/rabbitmq-server/deps/rabbitmq_management/LICENSE-MPL-RabbitMQ b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MPL-RabbitMQ new file mode 100644 index 0000000..e163fcc --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/LICENSE-MPL-RabbitMQ @@ -0,0 +1,455 @@ + MOZILLA PUBLIC LICENSE + Version 1.1 + + --------------- + +1. Definitions. + + 1.0.1. "Commercial Use" means distribution or otherwise making the + Covered Code available to a third party. + + 1.1. "Contributor" means each entity that creates or contributes to + the creation of Modifications. + + 1.2. 
"Contributor Version" means the combination of the Original + Code, prior Modifications used by a Contributor, and the Modifications + made by that particular Contributor. + + 1.3. "Covered Code" means the Original Code or Modifications or the + combination of the Original Code and Modifications, in each case + including portions thereof. + + 1.4. "Electronic Distribution Mechanism" means a mechanism generally + accepted in the software development community for the electronic + transfer of data. + + 1.5. "Executable" means Covered Code in any form other than Source + Code. + + 1.6. "Initial Developer" means the individual or entity identified + as the Initial Developer in the Source Code notice required by Exhibit + A. + + 1.7. "Larger Work" means a work which combines Covered Code or + portions thereof with code not governed by the terms of this License. + + 1.8. "License" means this document. + + 1.8.1. "Licensable" means having the right to grant, to the maximum + extent possible, whether at the time of the initial grant or + subsequently acquired, any and all of the rights conveyed herein. + + 1.9. "Modifications" means any addition to or deletion from the + substance or structure of either the Original Code or any previous + Modifications. When Covered Code is released as a series of files, a + Modification is: + A. Any addition to or deletion from the contents of a file + containing Original Code or previous Modifications. + + B. Any new file that contains any part of the Original Code or + previous Modifications. + + 1.10. "Original Code" means Source Code of computer software code + which is described in the Source Code notice required by Exhibit A as + Original Code, and which, at the time of its release under this + License is not already Covered Code governed by this License. + + 1.10.1. 
"Patent Claims" means any patent claim(s), now owned or + hereafter acquired, including without limitation, method, process, + and apparatus claims, in any patent Licensable by grantor. + + 1.11. "Source Code" means the preferred form of the Covered Code for + making modifications to it, including all modules it contains, plus + any associated interface definition files, scripts used to control + compilation and installation of an Executable, or source code + differential comparisons against either the Original Code or another + well known, available Covered Code of the Contributor's choice. The + Source Code can be in a compressed or archival form, provided the + appropriate decompression or de-archiving software is widely available + for no charge. + + 1.12. "You" (or "Your") means an individual or a legal entity + exercising rights under, and complying with all of the terms of, this + License or a future version of this License issued under Section 6.1. + For legal entities, "You" includes any entity which controls, is + controlled by, or is under common control with You. For purposes of + this definition, "control" means (a) the power, direct or indirect, + to cause the direction or management of such entity, whether by + contract or otherwise, or (b) ownership of more than fifty percent + (50%) of the outstanding shares or beneficial ownership of such + entity. + +2. Source Code License. + + 2.1. The Initial Developer Grant. 
+ The Initial Developer hereby grants You a world-wide, royalty-free, + non-exclusive license, subject to third party intellectual property + claims: + (a) under intellectual property rights (other than patent or + trademark) Licensable by Initial Developer to use, reproduce, + modify, display, perform, sublicense and distribute the Original + Code (or portions thereof) with or without Modifications, and/or + as part of a Larger Work; and + + (b) under Patents Claims infringed by the making, using or + selling of Original Code, to make, have made, use, practice, + sell, and offer for sale, and/or otherwise dispose of the + Original Code (or portions thereof). + + (c) the licenses granted in this Section 2.1(a) and (b) are + effective on the date Initial Developer first distributes + Original Code under the terms of this License. + + (d) Notwithstanding Section 2.1(b) above, no patent license is + granted: 1) for code that You delete from the Original Code; 2) + separate from the Original Code; or 3) for infringements caused + by: i) the modification of the Original Code or ii) the + combination of the Original Code with other software or devices. + + 2.2. Contributor Grant. 
+ Subject to third party intellectual property claims, each Contributor + hereby grants You a world-wide, royalty-free, non-exclusive license + + (a) under intellectual property rights (other than patent or + trademark) Licensable by Contributor, to use, reproduce, modify, + display, perform, sublicense and distribute the Modifications + created by such Contributor (or portions thereof) either on an + unmodified basis, with other Modifications, as Covered Code + and/or as part of a Larger Work; and + + (b) under Patent Claims infringed by the making, using, or + selling of Modifications made by that Contributor either alone + and/or in combination with its Contributor Version (or portions + of such combination), to make, use, sell, offer for sale, have + made, and/or otherwise dispose of: 1) Modifications made by that + Contributor (or portions thereof); and 2) the combination of + Modifications made by that Contributor with its Contributor + Version (or portions of such combination). + + (c) the licenses granted in Sections 2.2(a) and 2.2(b) are + effective on the date Contributor first makes Commercial Use of + the Covered Code. + + (d) Notwithstanding Section 2.2(b) above, no patent license is + granted: 1) for any code that Contributor has deleted from the + Contributor Version; 2) separate from the Contributor Version; + 3) for infringements caused by: i) third party modifications of + Contributor Version or ii) the combination of Modifications made + by that Contributor with other software (except as part of the + Contributor Version) or other devices; or 4) under Patent Claims + infringed by Covered Code in the absence of Modifications made by + that Contributor. + +3. Distribution Obligations. + + 3.1. Application of License. + The Modifications which You create or to which You contribute are + governed by the terms of this License, including without limitation + Section 2.2. 
The Source Code version of Covered Code may be + distributed only under the terms of this License or a future version + of this License released under Section 6.1, and You must include a + copy of this License with every copy of the Source Code You + distribute. You may not offer or impose any terms on any Source Code + version that alters or restricts the applicable version of this + License or the recipients' rights hereunder. However, You may include + an additional document offering the additional rights described in + Section 3.5. + + 3.2. Availability of Source Code. + Any Modification which You create or to which You contribute must be + made available in Source Code form under the terms of this License + either on the same media as an Executable version or via an accepted + Electronic Distribution Mechanism to anyone to whom you made an + Executable version available; and if made available via Electronic + Distribution Mechanism, must remain available for at least twelve (12) + months after the date it initially became available, or at least six + (6) months after a subsequent version of that particular Modification + has been made available to such recipients. You are responsible for + ensuring that the Source Code version remains available even if the + Electronic Distribution Mechanism is maintained by a third party. + + 3.3. Description of Modifications. + You must cause all Covered Code to which You contribute to contain a + file documenting the changes You made to create that Covered Code and + the date of any change. You must include a prominent statement that + the Modification is derived, directly or indirectly, from Original + Code provided by the Initial Developer and including the name of the + Initial Developer in (a) the Source Code, and (b) in any notice in an + Executable version or related documentation in which You describe the + origin or ownership of the Covered Code. + + 3.4. Intellectual Property Matters + (a) Third Party Claims. 
+ If Contributor has knowledge that a license under a third party's + intellectual property rights is required to exercise the rights + granted by such Contributor under Sections 2.1 or 2.2, + Contributor must include a text file with the Source Code + distribution titled "LEGAL" which describes the claim and the + party making the claim in sufficient detail that a recipient will + know whom to contact. If Contributor obtains such knowledge after + the Modification is made available as described in Section 3.2, + Contributor shall promptly modify the LEGAL file in all copies + Contributor makes available thereafter and shall take other steps + (such as notifying appropriate mailing lists or newsgroups) + reasonably calculated to inform those who received the Covered + Code that new knowledge has been obtained. + + (b) Contributor APIs. + If Contributor's Modifications include an application programming + interface and Contributor has knowledge of patent licenses which + are reasonably necessary to implement that API, Contributor must + also include this information in the LEGAL file. + + (c) Representations. + Contributor represents that, except as disclosed pursuant to + Section 3.4(a) above, Contributor believes that Contributor's + Modifications are Contributor's original creation(s) and/or + Contributor has sufficient rights to grant the rights conveyed by + this License. + + 3.5. Required Notices. + You must duplicate the notice in Exhibit A in each file of the Source + Code. If it is not possible to put such notice in a particular Source + Code file due to its structure, then You must include such notice in a + location (such as a relevant directory) where a user would be likely + to look for such a notice. If You created one or more Modification(s) + You may add your name as a Contributor to the notice described in + Exhibit A. 
You must also duplicate this License in any documentation + for the Source Code where You describe recipients' rights or ownership + rights relating to Covered Code. You may choose to offer, and to + charge a fee for, warranty, support, indemnity or liability + obligations to one or more recipients of Covered Code. However, You + may do so only on Your own behalf, and not on behalf of the Initial + Developer or any Contributor. You must make it absolutely clear than + any such warranty, support, indemnity or liability obligation is + offered by You alone, and You hereby agree to indemnify the Initial + Developer and every Contributor for any liability incurred by the + Initial Developer or such Contributor as a result of warranty, + support, indemnity or liability terms You offer. + + 3.6. Distribution of Executable Versions. + You may distribute Covered Code in Executable form only if the + requirements of Section 3.1-3.5 have been met for that Covered Code, + and if You include a notice stating that the Source Code version of + the Covered Code is available under the terms of this License, + including a description of how and where You have fulfilled the + obligations of Section 3.2. The notice must be conspicuously included + in any notice in an Executable version, related documentation or + collateral in which You describe recipients' rights relating to the + Covered Code. You may distribute the Executable version of Covered + Code or ownership rights under a license of Your choice, which may + contain terms different from this License, provided that You are in + compliance with the terms of this License and that the license for the + Executable version does not attempt to limit or alter the recipient's + rights in the Source Code version from the rights set forth in this + License. 
If You distribute the Executable version under a different + license You must make it absolutely clear that any terms which differ + from this License are offered by You alone, not by the Initial + Developer or any Contributor. You hereby agree to indemnify the + Initial Developer and every Contributor for any liability incurred by + the Initial Developer or such Contributor as a result of any such + terms You offer. + + 3.7. Larger Works. + You may create a Larger Work by combining Covered Code with other code + not governed by the terms of this License and distribute the Larger + Work as a single product. In such a case, You must make sure the + requirements of this License are fulfilled for the Covered Code. + +4. Inability to Comply Due to Statute or Regulation. + + If it is impossible for You to comply with any of the terms of this + License with respect to some or all of the Covered Code due to + statute, judicial order, or regulation then You must: (a) comply with + the terms of this License to the maximum extent possible; and (b) + describe the limitations and the code they affect. Such description + must be included in the LEGAL file described in Section 3.4 and must + be included with all distributions of the Source Code. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Application of this License. + + This License applies to code to which the Initial Developer has + attached the notice in Exhibit A and to related Covered Code. + +6. Versions of the License. + + 6.1. New Versions. + Netscape Communications Corporation ("Netscape") may publish revised + and/or new versions of the License from time to time. Each version + will be given a distinguishing version number. + + 6.2. Effect of New Versions. 
+ Once Covered Code has been published under a particular version of the + License, You may always continue to use it under the terms of that + version. You may also choose to use such Covered Code under the terms + of any subsequent version of the License published by Netscape. No one + other than Netscape has the right to modify the terms applicable to + Covered Code created under this License. + + 6.3. Derivative Works. + If You create or use a modified version of this License (which you may + only do in order to apply it to code which is not already Covered Code + governed by this License), You must (a) rename Your license so that + the phrases "Mozilla", "MOZILLAPL", "MOZPL", "Netscape", + "MPL", "NPL" or any confusingly similar phrase do not appear in your + license (except to note that your license differs from this License) + and (b) otherwise make it clear that Your version of the license + contains terms which differ from the Mozilla Public License and + Netscape Public License. (Filling in the name of the Initial + Developer, Original Code or Contributor in the notice described in + Exhibit A shall not of themselves be deemed to be modifications of + this License.) + +7. DISCLAIMER OF WARRANTY. + + COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, + WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, + WITHOUT LIMITATION, WARRANTIES THAT THE COVERED CODE IS FREE OF + DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. + THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED CODE + IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT, + YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE + COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER + OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF + ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +8. TERMINATION. + + 8.1. 
This License and the rights granted hereunder will terminate + automatically if You fail to comply with terms herein and fail to cure + such breach within 30 days of becoming aware of the breach. All + sublicenses to the Covered Code which are properly granted shall + survive any termination of this License. Provisions which, by their + nature, must remain in effect beyond the termination of this License + shall survive. + + 8.2. If You initiate litigation by asserting a patent infringement + claim (excluding declatory judgment actions) against Initial Developer + or a Contributor (the Initial Developer or Contributor against whom + You file such action is referred to as "Participant") alleging that: + + (a) such Participant's Contributor Version directly or indirectly + infringes any patent, then any and all rights granted by such + Participant to You under Sections 2.1 and/or 2.2 of this License + shall, upon 60 days notice from Participant terminate prospectively, + unless if within 60 days after receipt of notice You either: (i) + agree in writing to pay Participant a mutually agreeable reasonable + royalty for Your past and future use of Modifications made by such + Participant, or (ii) withdraw Your litigation claim with respect to + the Contributor Version against such Participant. If within 60 days + of notice, a reasonable royalty and payment arrangement are not + mutually agreed upon in writing by the parties or the litigation claim + is not withdrawn, the rights granted by Participant to You under + Sections 2.1 and/or 2.2 automatically terminate at the expiration of + the 60 day notice period specified above. 
+ + (b) any software, hardware, or device, other than such Participant's + Contributor Version, directly or indirectly infringes any patent, then + any rights granted to You by such Participant under Sections 2.1(b) + and 2.2(b) are revoked effective as of the date You first made, used, + sold, distributed, or had made, Modifications made by that + Participant. + + 8.3. If You assert a patent infringement claim against Participant + alleging that such Participant's Contributor Version directly or + indirectly infringes any patent where such claim is resolved (such as + by license or settlement) prior to the initiation of patent + infringement litigation, then the reasonable value of the licenses + granted by such Participant under Sections 2.1 or 2.2 shall be taken + into account in determining the amount or value of any payment or + license. + + 8.4. In the event of termination under Sections 8.1 or 8.2 above, + all end user license agreements (excluding distributors and resellers) + which have been validly granted by You or any distributor hereunder + prior to termination shall survive termination. + +9. LIMITATION OF LIABILITY. + + UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT + (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL + DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE, + OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR + ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY + CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL, + WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER + COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN + INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF + LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY + RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW + PROHIBITS SUCH LIMITATION. 
SOME JURISDICTIONS DO NOT ALLOW THE + EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO + THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. + +10. U.S. GOVERNMENT END USERS. + + The Covered Code is a "commercial item," as that term is defined in + 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer + software" and "commercial computer software documentation," as such + terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 + C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), + all U.S. Government End Users acquire Covered Code with only those + rights set forth herein. + +11. MISCELLANEOUS. + + This License represents the complete agreement concerning subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. This License shall be governed by + California law provisions (except to the extent applicable law, if + any, provides otherwise), excluding its conflict-of-law provisions. + With respect to disputes in which at least one party is a citizen of, + or an entity chartered or registered to do business in the United + States of America, any litigation relating to this License shall be + subject to the jurisdiction of the Federal Courts of the Northern + District of California, with venue lying in Santa Clara County, + California, with the losing party responsible for costs, including + without limitation, court costs and reasonable attorneys' fees and + expenses. The application of the United Nations Convention on + Contracts for the International Sale of Goods is expressly excluded. + Any law or regulation which provides that the language of a contract + shall be construed against the drafter shall not apply to this + License. + +12. RESPONSIBILITY FOR CLAIMS. 
+ + As between Initial Developer and the Contributors, each party is + responsible for claims and damages arising, directly or indirectly, + out of its utilization of rights under this License and You agree to + work with Initial Developer and Contributors to distribute such + responsibility on an equitable basis. Nothing herein is intended or + shall be deemed to constitute any admission of liability. + +13. MULTIPLE-LICENSED CODE. + + Initial Developer may designate portions of the Covered Code as + "Multiple-Licensed". "Multiple-Licensed" means that the Initial + Developer permits you to utilize portions of the Covered Code under + Your choice of the NPL or the alternative licenses, if any, specified + by the Initial Developer in the file described in Exhibit A. + +EXHIBIT A -Mozilla Public License. + + ``The contents of this file are subject to the Mozilla Public License + Version 1.1 (the "License"); you may not use this file except in + compliance with the License. You may obtain a copy of the License at + http://www.mozilla.org/MPL/ + + Software distributed under the License is distributed on an "AS IS" + basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the + License for the specific language governing rights and limitations + under the License. + + The Original Code is RabbitMQ Management Plugin. + + The Initial Developer of the Original Code is GoPivotal, Inc. + Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved.'' + + [NOTE: The text of this Exhibit A may differ slightly from the text of + the notices in the Source Code files of the Original Code. You should + use the text of this Exhibit A rather than the text found in the + Original Code Source Code for Your Modifications.] 
diff --git a/rabbitmq-server/deps/rabbitmq_management/Makefile b/rabbitmq-server/deps/rabbitmq_management/Makefile new file mode 100644 index 0000000..8c66c17 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/Makefile @@ -0,0 +1,52 @@ +PROJECT = rabbitmq_management + +DEPS = amqp_client webmachine rabbitmq_web_dispatch rabbitmq_management_agent +dep_webmachine = git https://github.com/rabbitmq/webmachine.git 6b5210c0ed07159f43222255e05a90bbef6c8cbe +dep_rabbitmq_web_dispatch = git https://github.com/rabbitmq/rabbitmq-web-dispatch.git stable + +DEP_PLUGINS = rabbit_common/mk/rabbitmq-plugin.mk + +# FIXME: Use erlang.mk patched for RabbitMQ, while waiting for PRs to be +# reviewed and merged. + +ERLANG_MK_REPO = https://github.com/rabbitmq/erlang.mk.git +ERLANG_MK_COMMIT = rabbitmq-tmp + +include rabbitmq-components.mk +include erlang.mk + +# -------------------------------------------------------------------- +# Distribution. +# -------------------------------------------------------------------- + +list-dist-deps:: + @echo bin/rabbitmqadmin + +prepare-dist:: + $(verbose) sed 's/%%VSN%%/$(VSN)/' bin/rabbitmqadmin \ + > $(EZ_DIR)/priv/www/cli/rabbitmqadmin + +# -------------------------------------------------------------------- +# Testing. 
+# -------------------------------------------------------------------- + +FILTER := all +COVER := false + +WITH_BROKER_TEST_MAKEVARS := \ + RABBITMQ_CONFIG_FILE=$(CURDIR)/etc/rabbit-test +WITH_BROKER_TEST_ENVVARS := \ + RABBITMQADMIN=$(CURDIR)/bin/rabbitmqadmin +WITH_BROKER_TEST_COMMANDS := \ + rabbit_test_runner:run_in_broker(\"$(CURDIR)/test\",\"$(FILTER)\") +WITH_BROKER_TEST_SCRIPTS := $(CURDIR)/test/src/rabbitmqadmin-test-wrapper.sh + +TEST_PLUGINS_ROOTDIR = $(TEST_TMPDIR)/PLUGINS + +STANDALONE_TEST_COMMANDS := \ + rabbit_test_runner:run_multi(\"$(DEPS_DIR)\",\"$(CURDIR)/test\",\"$(FILTER)\",$(COVER),\"$(TEST_PLUGINS_ROOTDIR)\") + +pre-standalone-tests:: test-tmpdir test-dist + $(verbose) rm -rf $(TEST_PLUGINS_ROOTDIR) + $(exec_verbose) mkdir -p $(TEST_PLUGINS_ROOTDIR) + $(verbose) cp -a $(DIST_DIR) $(TEST_PLUGINS_ROOTDIR) diff --git a/rabbitmq-server/deps/rabbitmq_management/README.md b/rabbitmq-server/deps/rabbitmq_management/README.md new file mode 100644 index 0000000..fa8bbfe --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/README.md @@ -0,0 +1,13 @@ +# RabbitMQ Management Plugin + +This plugin provides a management UI and HTTP API for RabbitMQ. +This plugin is included in the RabbitMQ distribution. To enable +it, use rabbitmq-plugins. + +## Documentation + +[RabbitMQ management UI documentation](http://www.rabbitmq.com/management.html). 
+ +## Continuous Integration + +[![Build Status](https://travis-ci.org/rabbitmq/rabbitmq-management.svg?branch=master)](https://travis-ci.org/rabbitmq/rabbitmq-management) diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/bin/rabbitmqadmin b/rabbitmq-server/deps/rabbitmq_management/bin/rabbitmqadmin similarity index 92% rename from rabbitmq-server/plugins-src/rabbitmq-management/bin/rabbitmqadmin rename to rabbitmq-server/deps/rabbitmq_management/bin/rabbitmqadmin index f8f2da5..a33271c 100755 --- a/rabbitmq-server/plugins-src/rabbitmq-management/bin/rabbitmqadmin +++ b/rabbitmq-server/deps/rabbitmq_management/bin/rabbitmqadmin @@ -13,7 +13,7 @@ # The Original Code is RabbitMQ Management Plugin. # # The Initial Developer of the Original Code is GoPivotal, Inc. -# Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. +# Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. import sys if sys.version_info[0] < 2 or (sys.version_info[0] == 2 and sys.version_info[1] < 6): @@ -252,6 +252,7 @@ def fmt_usage_stanza(root, verb): default_options = { "hostname" : "localhost", "port" : "15672", + "path_prefix" : "", "declare_vhost" : "/", "username" : "guest", "password" : "guest", @@ -291,6 +292,8 @@ def make_parser(): add("-P", "--port", dest="port", help="connect to port PORT", metavar="PORT") + add("--path-prefix", dest="path_prefix", + help="use specific URI path prefix for the RabbitMQ HTTP API (default: blank string)") add("-V", "--vhost", dest="vhost", help="connect to vhost VHOST [default: all vhosts for list, '/' for declare]", metavar="VHOST") @@ -367,7 +370,10 @@ def make_configuration(): (options.node, options.config, error)) else: for key, val in new_conf.items(): - setattr(options, key, val) + if key == 'ssl': + setattr(options, key, val == "True") + else: + setattr(options, key, val) return (options, args) @@ -421,16 +427,16 @@ class Management: self.args = args def get(self, path): - return self.http("GET", "/api%s" % 
path, "") + return self.http("GET", "%s/api%s" % (self.options.path_prefix, path), "") def put(self, path, body): - return self.http("PUT", "/api%s" % path, body) + return self.http("PUT", "%s/api%s" % (self.options.path_prefix, path), body) def post(self, path, body): - return self.http("POST", "/api%s" % path, body) + return self.http("POST", "%s/api%s" % (self.options.path_prefix, path), body) def delete(self, path): - return self.http("DELETE", "/api%s" % path, "") + return self.http("DELETE", "%s/api%s" % (self.options.path_prefix, path), "") def http(self, method, path, body): if self.options.ssl: @@ -525,7 +531,10 @@ class Management: def invoke_export(self): path = self.get_arg() - definitions = self.get("/definitions") + uri = "/definitions" + if self.options.vhost: + uri += "/%s" % quote_plus(self.options.vhost) + definitions = self.get(uri) f = open(path, 'w') f.write(definitions) f.close() @@ -537,7 +546,10 @@ class Management: f = open(path, 'r') definitions = f.read() f.close() - self.post("/definitions", definitions) + uri = "/definitions" + if self.options.vhost: + uri += "/%s" % quote_plus(self.options.vhost) + self.post(uri, definitions) self.verbose("Imported definitions for %s from \"%s\"" % (self.options.hostname, path)) @@ -867,77 +879,77 @@ _rabbitmqadmin() fargs="--help --host --port --vhost --username --password --format --depth --sort --sort-reverse" case "${prev}" in - list) - COMPREPLY=( $(compgen -W '""" + " ".join(LISTABLE) + """' -- ${cur}) ) + list) + COMPREPLY=( $(compgen -W '""" + " ".join(LISTABLE) + """' -- ${cur}) ) return 0 ;; - show) - COMPREPLY=( $(compgen -W '""" + " ".join(SHOWABLE) + """' -- ${cur}) ) + show) + COMPREPLY=( $(compgen -W '""" + " ".join(SHOWABLE) + """' -- ${cur}) ) return 0 ;; - declare) - COMPREPLY=( $(compgen -W '""" + " ".join(DECLARABLE.keys()) + """' -- ${cur}) ) + declare) + COMPREPLY=( $(compgen -W '""" + " ".join(DECLARABLE.keys()) + """' -- ${cur}) ) return 0 ;; - delete) - COMPREPLY=( $(compgen -W 
'""" + " ".join(DELETABLE.keys()) + """' -- ${cur}) ) + delete) + COMPREPLY=( $(compgen -W '""" + " ".join(DELETABLE.keys()) + """' -- ${cur}) ) return 0 ;; - close) - COMPREPLY=( $(compgen -W '""" + " ".join(CLOSABLE.keys()) + """' -- ${cur}) ) + close) + COMPREPLY=( $(compgen -W '""" + " ".join(CLOSABLE.keys()) + """' -- ${cur}) ) return 0 ;; - purge) - COMPREPLY=( $(compgen -W '""" + " ".join(PURGABLE.keys()) + """' -- ${cur}) ) + purge) + COMPREPLY=( $(compgen -W '""" + " ".join(PURGABLE.keys()) + """' -- ${cur}) ) return 0 ;; - export) - COMPREPLY=( $(compgen -f ${cur}) ) + export) + COMPREPLY=( $(compgen -f ${cur}) ) return 0 ;; - import) - COMPREPLY=( $(compgen -f ${cur}) ) + import) + COMPREPLY=( $(compgen -f ${cur}) ) return 0 ;; - help) + help) opts="subcommands config" - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) return 0 ;; - -H) - COMPREPLY=( $(compgen -A hostname ${cur}) ) + -H) + COMPREPLY=( $(compgen -A hostname ${cur}) ) return 0 ;; - --host) - COMPREPLY=( $(compgen -A hostname ${cur}) ) + --host) + COMPREPLY=( $(compgen -A hostname ${cur}) ) return 0 ;; - -V) + -V) opts="$(rabbitmqadmin -q -f bash list vhosts)" - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) return 0 ;; - --vhost) + --vhost) opts="$(rabbitmqadmin -q -f bash list vhosts)" - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) return 0 ;; - -u) + -u) opts="$(rabbitmqadmin -q -f bash list users)" - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) return 0 ;; - --username) + --username) opts="$(rabbitmqadmin -q -f bash list users)" - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) return 0 ;; - -f) - COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\" -- ${cur}) ) + -f) + COMPREPLY=( $(compgen -W \"""" + " 
".join(FORMATS.keys()) + """\" -- ${cur}) ) return 0 ;; - --format) - COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\" -- ${cur}) ) + --format) + COMPREPLY=( $(compgen -W \"""" + " ".join(FORMATS.keys()) + """\" -- ${cur}) ) return 0 ;; @@ -946,7 +958,7 @@ _rabbitmqadmin() key = l[0:len(l) - 1] script += " " + key + """) opts="$(rabbitmqadmin -q -f bash list """ + l + """)" - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) + COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) return 0 ;; """ diff --git a/rabbitmq-server/deps/rabbitmq_management/build.config b/rabbitmq-server/deps/rabbitmq_management/build.config new file mode 100644 index 0000000..0855303 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/build.config @@ -0,0 +1,43 @@ +# Do *not* comment or remove core modules +# unless you know what you are doing. +# +# Feel free to comment plugins out however. + +# Core modules. +core/core +index/* +core/index +core/deps + +# Plugins that must run before Erlang code gets compiled. +plugins/erlydtl +plugins/protobuffs + +# Core modules, continued. +core/erlc +core/docs +core/rel +core/test +core/compat + +# Plugins. +plugins/asciidoc +plugins/bootstrap +plugins/c_src +plugins/ci +plugins/ct +plugins/dialyzer +plugins/edoc +plugins/elvis +plugins/escript +# plugins/eunit +plugins/relx +plugins/shell +plugins/triq +plugins/xref + +# Plugins enhancing the functionality of other plugins. +plugins/cover + +# Core modules which can use variables from plugins. 
+core/deps-tools diff --git a/rabbitmq-server/deps/rabbitmq_management/erlang.mk b/rabbitmq-server/deps/rabbitmq_management/erlang.mk new file mode 100644 index 0000000..9f0c0c3 --- /dev/null +++ b/rabbitmq-server/deps/rabbitmq_management/erlang.mk @@ -0,0 +1,6589 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c-dirty + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. 
+ +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." \ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... 
Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). 
+endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. 
+ +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch 
= git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. 
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of static BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += 
bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstract time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simply writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external one using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = A embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu
+pkg_sheriff_fetch = git
+pkg_sheriff_repo = https://github.com/extend/sheriff
+pkg_sheriff_commit = master
+
+PACKAGES += shotgun
+pkg_shotgun_name = shotgun
+pkg_shotgun_description = better than just a gun
+pkg_shotgun_homepage = https://github.com/inaka/shotgun
+pkg_shotgun_fetch = git
+pkg_shotgun_repo = https://github.com/inaka/shotgun
+pkg_shotgun_commit = 0.1.0
+
+PACKAGES += sidejob
+pkg_sidejob_name = sidejob
+pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang
+pkg_sidejob_homepage = https://github.com/basho/sidejob
+pkg_sidejob_fetch = git
+pkg_sidejob_repo = https://github.com/basho/sidejob
+pkg_sidejob_commit = master
+
+PACKAGES += sieve
+pkg_sieve_name = sieve
+pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang
+pkg_sieve_homepage = https://github.com/benoitc/sieve
+pkg_sieve_fetch = git
+pkg_sieve_repo = https://github.com/benoitc/sieve
+pkg_sieve_commit = master
+
+PACKAGES += sighandler
+pkg_sighandler_name = sighandler
+pkg_sighandler_description = Handle UNIX signals in Erlang
+pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler
+pkg_sighandler_fetch = git
+pkg_sighandler_repo = https://github.com/jkingsbery/sighandler
+pkg_sighandler_commit = master
+
+PACKAGES += simhash
+pkg_simhash_name = simhash
+pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data.
+pkg_simhash_homepage = https://github.com/ferd/simhash
+pkg_simhash_fetch = git
+pkg_simhash_repo = https://github.com/ferd/simhash
+pkg_simhash_commit = master
+
+PACKAGES += simple_bridge
+pkg_simple_bridge_name = simple_bridge
+pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers.
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-edoc edoc + +# Configuration. + +EDOC_OPTS ?= + +# Core targets. + +docs:: distclean-edoc edoc + +distclean:: distclean-edoc + +# Plugin-specific targets. + +edoc: doc-deps + $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' + +distclean-edoc: + $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. 
+ +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. + +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. 
+ +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. 
+ +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. + +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. 
+ +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. +endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' > $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. 
+ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. +EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/etc/bunny.config b/rabbitmq-server/deps/rabbitmq_management/etc/bunny.config similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/etc/bunny.config rename to rabbitmq-server/deps/rabbitmq_management/etc/bunny.config diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/etc/hare.config b/rabbitmq-server/deps/rabbitmq_management/etc/hare.config similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/etc/hare.config rename to rabbitmq-server/deps/rabbitmq_management/etc/hare.config diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/etc/rabbit-test.config b/rabbitmq-server/deps/rabbitmq_management/etc/rabbit-test.config similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/etc/rabbit-test.config rename to rabbitmq-server/deps/rabbitmq_management/etc/rabbit-test.config diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt.hrl b/rabbitmq-server/deps/rabbitmq_management/include/rabbit_mgmt.hrl similarity index 79% rename from rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt.hrl rename to rabbitmq-server/deps/rabbitmq_management/include/rabbit_mgmt.hrl index 43cc67d..fc8f58e 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt.hrl +++ b/rabbitmq-server/deps/rabbitmq_management/include/rabbit_mgmt.hrl @@ -11,10 
+11,12 @@ %% The Original Code is RabbitMQ Management Console. %% %% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. +%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved. %% --record(context, {user, password = none}). +-record(context, {user, + password = none, + impl}). % storage for a context of the resource handler -record(range, {first, last, incr}). -record(stats, {diffs, base}). diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt_test.hrl b/rabbitmq-server/deps/rabbitmq_management/include/rabbit_mgmt_test.hrl similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/include/rabbit_mgmt_test.hrl rename to rabbitmq-server/deps/rabbitmq_management/include/rabbit_mgmt_test.hrl diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/license_info b/rabbitmq-server/deps/rabbitmq_management/license_info similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/license_info rename to rabbitmq-server/deps/rabbitmq_management/license_info diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/api/index.html b/rabbitmq-server/deps/rabbitmq_management/priv/www/api/index.html similarity index 93% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/api/index.html rename to rabbitmq-server/deps/rabbitmq_management/priv/www/api/index.html index a509a69..ac43f8e 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/api/index.html +++ b/rabbitmq-server/deps/rabbitmq_management/priv/www/api/index.html @@ -255,6 +255,44 @@ Content-Length: 0 "file". + + X + + + X + /api/definitions/vhost
+ + + The server definitions for a given virtual host - + exchanges, queues, bindings and policies. + POST to upload an existing set of definitions. Note that: +
    +
  • + The definitions are merged. Anything already existing on + the server but not in the uploaded definitions is + untouched. +
  • +
  • + Conflicting definitions on immutable objects (exchanges, + queues and bindings) will cause an error. +
  • +
  • + Conflicting definitions on mutable objects will cause + the object in the server to be overwritten with the + object from the definitions. +
  • +
  • + In the event of an error you will be left with a + part-applied set of definitions. +
  • +
+ For convenience you may upload a file from a browser to this + URI (i.e. you can use multipart/form-data as + well as application/json) in which case the + definitions should be uploaded as a form field named + "file". + + X @@ -263,6 +301,14 @@ Content-Length: 0 /api/connections A list of all open connections. + + X + + + + /api/vhosts/vhost/connections + A list of all open connections in a specific vhost. + X @@ -293,6 +339,14 @@ Content-Length: 0 /api/channels A list of all open channels. + + X + + + + /api/vhosts/vhost/channels + A list of all open channels in a specific vhost. + X diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/cli/index.html b/rabbitmq-server/deps/rabbitmq_management/priv/www/cli/index.html similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/cli/index.html rename to rabbitmq-server/deps/rabbitmq_management/priv/www/cli/index.html diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/css/evil.css b/rabbitmq-server/deps/rabbitmq_management/priv/www/css/evil.css similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/css/evil.css rename to rabbitmq-server/deps/rabbitmq_management/priv/www/css/evil.css diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/css/main.css b/rabbitmq-server/deps/rabbitmq_management/priv/www/css/main.css similarity index 99% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/css/main.css rename to rabbitmq-server/deps/rabbitmq_management/priv/www/css/main.css index 74a321d..b00cb09 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/css/main.css +++ b/rabbitmq-server/deps/rabbitmq_management/priv/www/css/main.css @@ -40,6 +40,9 @@ div.box, div.section, div.section-hidden { overflow: auto; width: 100%; } .right { float: right; } .clear { clear: both; } +.shortinput { width: 50px; text-align: right; } + +.help:after { content: '(?)'; } .help, 
.popup-options-link { color: #888; cursor: pointer; } .help:hover, .popup-options-link:hover { color: #444; } diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/doc/stats.html b/rabbitmq-server/deps/rabbitmq_management/priv/www/doc/stats.html similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/doc/stats.html rename to rabbitmq-server/deps/rabbitmq_management/priv/www/doc/stats.html diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/favicon.ico b/rabbitmq-server/deps/rabbitmq_management/priv/www/favicon.ico similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/favicon.ico rename to rabbitmq-server/deps/rabbitmq_management/priv/www/favicon.ico diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-binary.png b/rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-binary.png similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-binary.png rename to rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-binary.png diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-green-dark.png b/rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-green-dark.png similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-green-dark.png rename to rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-green-dark.png diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-red-dark.png b/rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-red-dark.png similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-red-dark.png rename to rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-red-dark.png diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-red.png b/rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-red.png similarity index 100% 
rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-red.png rename to rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-red.png diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-yellow-dark.png b/rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-yellow-dark.png similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/bg-yellow-dark.png rename to rabbitmq-server/deps/rabbitmq_management/priv/www/img/bg-yellow-dark.png diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/collapse.png b/rabbitmq-server/deps/rabbitmq_management/priv/www/img/collapse.png similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/collapse.png rename to rabbitmq-server/deps/rabbitmq_management/priv/www/img/collapse.png diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/expand.png b/rabbitmq-server/deps/rabbitmq_management/priv/www/img/expand.png similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/expand.png rename to rabbitmq-server/deps/rabbitmq_management/priv/www/img/expand.png diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/rabbitmqlogo.png b/rabbitmq-server/deps/rabbitmq_management/priv/www/img/rabbitmqlogo.png similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/img/rabbitmqlogo.png rename to rabbitmq-server/deps/rabbitmq_management/priv/www/img/rabbitmqlogo.png diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/index.html b/rabbitmq-server/deps/rabbitmq_management/priv/www/index.html similarity index 93% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/index.html rename to rabbitmq-server/deps/rabbitmq_management/priv/www/index.html index 24b64d7..14759ff 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/index.html +++ 
b/rabbitmq-server/deps/rabbitmq_management/priv/www/index.html @@ -1,3 +1,4 @@ + RabbitMQ Management @@ -5,7 +6,7 @@ - + diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/base64.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/base64.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/base64.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/base64.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/charts.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/charts.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/charts.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/charts.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/dispatcher.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/dispatcher.js similarity index 93% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/dispatcher.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/dispatcher.js index 4c2d670..8d4a0ff 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/dispatcher.js +++ b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/dispatcher.js @@ -7,7 +7,8 @@ dispatcher_add(function(sammy) { sammy.get('#/', function() { var reqs = {'overview': {path: '/overview', options: {ranges: ['lengths-over', - 'msg-rates-over']}}}; + 'msg-rates-over']}}, + 'vhosts': '/vhosts'}; if (user_monitor) { reqs['nodes'] = '/nodes'; } @@ -31,9 +32,11 @@ dispatcher_add(function(sammy) { 'node', ''); }); - path('#/connections', - {'connections': {path: '/connections', options: {sort:true}}}, - 'connections'); + sammy.get('#/connections', function() { + renderConnections(); + }); + + sammy.get('#/connections/:name', function() { var name = esc(this.params['name']); render({'connection': {path: '/connections/' + name, @@ -52,17 +55,22 @@ 
dispatcher_add(function(sammy) { return false; }); - path('#/channels', {'channels': {path: '/channels', options: {sort:true}}}, - 'channels'); + sammy.get('#/channels', function() { + renderChannels(); + }); + sammy.get('#/channels/:name', function() { render({'channel': {path: '/channels/' + esc(this.params['name']), options:{ranges:['msg-rates-ch']}}}, 'channel', '#/channels'); }); - path('#/exchanges', {'exchanges': {path: '/exchanges', - options: {sort:true,vhost:true}}, - 'vhosts': '/vhosts'}, 'exchanges'); + + sammy.get('#/exchanges', function() { + renderExchanges() + }); + + sammy.get('#/exchanges/:vhost/:name', function() { var path = '/exchanges/' + esc(this.params['vhost']) + '/' + esc(this.params['name']); render({'exchange': {path: path, @@ -86,9 +94,11 @@ dispatcher_add(function(sammy) { return false; }); - path('#/queues', {'queues': {path: '/queues', - options: {sort:true,vhost:true}}, - 'vhosts': '/vhosts'}, 'queues'); + sammy.get('#/queues', function() { + renderQueues(); + }); + + sammy.get('#/queues/:vhost/:name', function() { var path = '/queues/' + esc(this.params['vhost']) + '/' + esc(this.params['name']); render({'queue': {path: path, @@ -147,6 +157,7 @@ dispatcher_add(function(sammy) { 'users': '/users/'}, 'vhost', '#/vhosts'); }); + sammy.put('#/vhosts', function() { if (sync_put(this, '/vhosts/:name')) { update_vhosts(); diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/ejs.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/ejs.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/ejs.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/ejs.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/ejs.min.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/ejs.min.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/ejs.min.js rename to 
rabbitmq-server/deps/rabbitmq_management/priv/www/js/ejs.min.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/excanvas.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/excanvas.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/excanvas.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/excanvas.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/excanvas.min.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/excanvas.min.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/excanvas.min.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/excanvas.min.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/formatters.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/formatters.js similarity index 83% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/formatters.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/formatters.js index b3c5bc9..a6cff81 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/formatters.js +++ b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/formatters.js @@ -661,11 +661,8 @@ function fmt_highlight_filter(text) { } } -function filter_ui(items) { - current_truncate = (current_truncate == null) ? - parseInt(get_pref('truncate')) : current_truncate; +function filter_ui_pg(items, truncate, appendselect) { var total = items.length; - if (current_filter != '') { var items2 = []; for (var i in items) { @@ -699,24 +696,85 @@ function filter_ui(items) { var selected = current_filter == '' ? (items_desc(items.length)) : (items.length + ' of ' + items_desc(total) + ' selected'); - var truncate_input = ''; + + selected += appendselect; + res += '

' + selected + + '' + truncate + '

'; + res += ''; + + return res; +} + + +function filter_ui(items) { + current_truncate = (current_truncate == null) ? + parseInt(get_pref('truncate')) : current_truncate; + var truncate_input = ''; + var selected = ''; if (items.length > current_truncate) { selected += ' ' + '(only showing first '; items.length = current_truncate; } else { - selected += ' (show at most '; + selected += ', page size up to '; } - res += '

' + selected + - '' + truncate_input + ')

'; - res += ''; + return filter_ui_pg(items, truncate_input, selected); +} + +function paginate_header_ui(pages, context){ + var res = '

' ; + res += ' All ' + context +' (' + pages.total_count + ((pages.filtered_count != pages.total_count) ? ' Filtered: ' + pages.filtered_count : '') + ')'; + res += '

' return res; } +function pagiante_ui(pages, context){ + var res = paginate_header_ui(pages, context); + res += '
'; + res += '

Pagination

'; + res += '
'; + res += ''; + res += '' + res += ''; + res += ''; + res += '' ; + + res += '' ; + + res +='
' + pages.page_count +' ' ; + res += ' (?)
' ; + res += '

'; + res += ' '; + res += ' '; } return '' + prefix + display + ''; @@ -798,4 +856,43 @@ function properties_size(obj) { if (obj.hasOwnProperty(k)) count++; } return count; +} + +function frm_default_value(template, defaultValue){ + var store_value = get_pref(template); + var result = (((store_value == null) + || (store_value == undefined) + || (store_value == '')) ? defaultValue : + store_value); + + return ((result == undefined) ? defaultValue : result); +} + +function fmt_page_number_request(template, defaultPage){ + if ((defaultPage == undefined) || (defaultPage <= 0)) + defaultPage = 1; + return frm_default_value(template + '_current_page_number', defaultPage); +} +function fmt_page_size_request(template, defaultPageSize){ + if ((defaultPageSize == undefined) || (defaultPageSize < 0)) + defaultPageSize = 100; + result = frm_default_value(template + '_current_page_size', defaultPageSize); + if (result > 500) result = 500; // max + return result; +} + +function fmt_filter_name_request(template, defaultName){ + return frm_default_value(template + '_current_filter_name', defaultName); +} + +function fmt_regex_request(template, defaultName){ + result = frm_default_value(template + '_current_regex', defaultName); + return result; +} + +function isNumberKey(evt){ + var charCode = (evt.which) ? 
evt.which : event.keyCode + if (charCode > 31 && (charCode < 48 || charCode > 57)) + return false; + return true; } diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/global.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/global.js similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/global.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/global.js index f2de0d9..694926a 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/global.js +++ b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/global.js @@ -243,6 +243,7 @@ var current_sort_reverse = false; var current_filter = ''; var current_filter_regex_on = false; + var current_filter_regex; var current_truncate; @@ -261,3 +262,7 @@ var update_counter = 0; // Holds chart data in between writing the div in an ejs and rendering // the chart. var chart_data = {}; + +// whenever a UI requests a page that doesn't exist +// because things were deleted between refreshes +var last_page_out_of_range_error = 0; diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/help.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/help.js similarity index 98% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/help.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/help.js index f50c19a..37f92f6 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/help.js +++ b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/help.js @@ -55,9 +55,15 @@ HELP = { 'export-definitions': 'The definitions consist of users, virtual hosts, permissions, parameters, exchanges, queues and bindings. They do not include the contents of queues or the cluster name. 
Exclusive queues will not be exported.', + 'export-definitions-vhost': + 'The definitions exported for a single virtual host consist of exchanges, queues, bindings and policies.', + 'import-definitions': 'The definitions that are imported will be merged with the current definitions. If an error occurs during import, any changes made will not be rolled back.', + 'import-definitions-vhost': + 'For a single virtual host, only exchanges, queues, bindings and policies are imported.', + 'exchange-rates-incoming': 'The incoming rate is the rate at which messages are published directly to this exchange.', diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery-1.6.4.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery-1.6.4.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery-1.6.4.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery-1.6.4.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery-1.6.4.min.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery-1.6.4.min.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery-1.6.4.min.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery-1.6.4.min.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery.flot.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery.flot.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.min.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery.flot.min.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.min.js rename to 
rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery.flot.min.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.time.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery.flot.time.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.time.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery.flot.time.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.time.min.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery.flot.time.min.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/jquery.flot.time.min.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/jquery.flot.time.min.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/json2.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/json2.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/json2.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/json2.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/main.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/main.js similarity index 85% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/main.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/main.js index 8118f62..bb2e70f 100644 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/main.js +++ b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/main.js @@ -41,7 +41,7 @@ function start_app_login() { set_auth_pref(username + ':' + password); check_login(); }); - this.get('#/login/:username/:password', login_route) + this.get('#/login/:username/:password', login_route); }); app.run(); if (get_pref('auth') != null) { @@ -83,7 +83,12 @@ function start_app() { // Note for when we upgrade: HashLocationProxy has become // 
DefaultLocationProxy in later versions, but otherwise the issue // remains. - Sammy.HashLocationProxy._interval = null; + + // updated to the version 0.7.6 this _interval = null is fixed + // just leave the history here. + //Sammy.HashLocationProxy._interval = null; + + app = new Sammy.Application(dispatcher); app.run(); var url = this.location.toString(); @@ -225,28 +230,30 @@ function update() { } function partial_update() { - if ($('.updatable').length > 0) { - if (update_counter >= 200) { - update_counter = 0; - full_refresh(); - return; - } - with_update(function(html) { - update_counter++; - replace_content('scratch', html); - var befores = $('#main .updatable'); - var afters = $('#scratch .updatable'); - if (befores.length != afters.length) { - throw("before/after mismatch"); + if (!$(".pagination_class").is(":focus")) { + if ($('.updatable').length > 0) { + if (update_counter >= 200) { + update_counter = 0; + full_refresh(); + return; } - for (var i = 0; i < befores.length; i++) { - $(befores[i]).empty().append($(afters[i]).contents()); - } - replace_content('scratch', ''); - postprocess_partial(); - render_charts(); - }); - } + with_update(function(html) { + update_counter++; + replace_content('scratch', html); + var befores = $('#main .updatable'); + var afters = $('#scratch .updatable'); + if (befores.length != afters.length) { + throw("before/after mismatch"); + } + for (var i = 0; i < befores.length; i++) { + $(befores[i]).empty().append($(afters[i]).contents()); + } + replace_content('scratch', ''); + postprocess_partial(); + render_charts(); + }); + } + } } function update_navigation() { @@ -389,12 +396,17 @@ function apply_state(reqs) { } var req2; if (options['vhost'] != undefined && current_vhost != '') { - req2 = req + '/' + esc(current_vhost); + var indexPage = req.indexOf("?page="); + if (indexPage >- 1) { + pageUrl = req.substr(indexPage); + req2 = req.substr(0,indexPage) + '/' + esc(current_vhost) + pageUrl; + } else + + req2 = req + '/' + 
esc(current_vhost); } else { req2 = req; } - var qs = []; if (options['sort'] != undefined && current_sort != null) { qs.push('sort=' + current_sort); @@ -422,7 +434,11 @@ function apply_state(reqs) { } } qs = qs.join('&'); - if (qs != '') qs = '?' + qs; + if (qs != '') + if (req2.indexOf("?page=") >- 1) + qs = '&' + qs; + else + qs = '?' + qs; reqs2[k] = req2 + qs; } @@ -445,6 +461,10 @@ function show_popup(type, text, mode) { } hide(); + if ($(cssClass).length && type === 'help' && + $(cssClass).text().indexOf(text.replace(/<[^>]*>/g, '')) != -1 ) { + return; + } $('h1').after(format('error-popup', {'type': type, 'text': text})); if (mode == 'fade') { $(cssClass).fadeIn(200); @@ -458,6 +478,17 @@ function show_popup(type, text, mode) { }); } + + + + function submit_import(form) { + var idx = $("select[name='vhost-upload'] option:selected").index() + var vhost = ((idx <=0 ) ? "" : "/" + esc($("select[name='vhost-upload'] option:selected").val())); + form.action ="api/definitions" + vhost + '?auth=' + get_pref('auth'); + form.submit(); + }; + + function postprocess() { $('form.confirm').submit(function() { return confirm("Are you sure? This object cannot be recovered " + @@ -477,13 +508,17 @@ function postprocess() { } }); $('#download-definitions').click(function() { - var path = 'api/definitions?download=' + + var idx = $("select[name='vhost-download'] option:selected").index() + var vhost = ((idx <=0 ) ? 
"" : "/" + esc($("select[name='vhost-download'] option:selected").val())); + var path = 'api/definitions' + vhost + '?download=' + esc($('#download-filename').val()) + '&auth=' + get_pref('auth'); window.location = path; setTimeout('app.run()'); return false; }); + + $('.update-manual').click(function() { update_manual($(this).attr('for'), $(this).attr('query')); }); @@ -508,7 +543,7 @@ function postprocess() { } }); $('.help').die().live('click', function() { - help($(this).attr('id')) + help($(this).attr('id')); }); $('.popup-options-link').die().live('click', function() { var remove = $('.popup-owner').length == 1 && @@ -556,10 +591,96 @@ function postprocess() { if (! user_administrator) { $('.administrator-only').remove(); } + update_multifields(); } + +function url_pagination_template(template, defaultPage, defaultPageSize){ + return '/' + template + '?page=' + fmt_page_number_request(template, defaultPage) + + '&page_size=' + fmt_page_size_request(template, defaultPageSize) + + '&name=' + fmt_filter_name_request(template, "") + + '&use_regex=' + ((fmt_regex_request(template,"") == "checked" ? 'true' : 'false')); + +} + + +function stored_page_info(template, page_start){ + var pageSize = $('#' + template+'-pagesize').val(); + var filterName = $('#' + template+'-name').val(); + + store_pref(template + '_current_page_number', page_start); + if (filterName != null && filterName != undefined) { + store_pref(template + '_current_filter_name', filterName); + } + var regex_on = $("#" + template + "-filter-regex-mode").is(':checked'); + + if (regex_on != null && regex_on != undefined) { + store_pref(template + '_current_regex', regex_on ? 
"checked" : " " ); + } + + + if (pageSize != null && pageSize != undefined) { + store_pref(template + '_current_page_size', pageSize); + } + +} + +function update_pages(template, page_start){ + stored_page_info(template, page_start); + switch (template) { + case 'queues' : renderQueues(); break; + case 'exchanges' : renderExchanges(); break; + case 'connections' : renderConnections(); break; + case 'channels' : renderChannels(); break; + } +} + + +function renderQueues() { + render({'queues': {path: url_pagination_template('queues', 1, 100), + options: {sort:true, vhost:true, pagination:true}}, + 'vhosts': '/vhosts'}, 'queues', '#/queues'); +} + +function renderExchanges() { + render({'exchanges': {path: url_pagination_template('exchanges', 1, 100), + options: {sort:true, vhost:true, pagination:true}}, + 'vhosts': '/vhosts'}, 'exchanges', '#/exchanges'); +} + +function renderConnections() { + render({'connections': {path: url_pagination_template('connections', 1, 100), + options: {sort:true}}}, + 'connections', '#/connections'); +} + +function renderChannels() { + render({'channels': {path: url_pagination_template('channels', 1, 100), + options: {sort:true}}}, + 'channels', '#/channels'); +} + + +function update_pages_from_ui(sender) { + update_pages(current_template, !!$(sender).attr('data-page-start') ? 
$(sender).attr('data-page-start') : $(sender).val()); +} + function postprocess_partial() { + $('.pagination_class_input').keypress(function(e) { + if (e.keyCode == 13) { + update_pages_from_ui(this); + } + }); + + $('.pagination_class_checkbox').click(function(e) { + update_pages_from_ui(this); + }); + + $('.pagination_class_select').change(function(e) { + update_pages_from_ui(this); + }); + setup_visibility(); $('.sort').click(function() { var sort = $(this).attr('sort'); @@ -572,7 +693,6 @@ function postprocess_partial() { } update(); }); - $('.help').html('(?)'); // TODO remove this hack when we get rid of "updatable" if ($('#filter-warning-show').length > 0) { $('#filter-truncate').addClass('filter-warning'); @@ -873,6 +993,7 @@ function with_req(method, path, body, fun) { var req = xmlHttpRequest(); req.open(method, 'api' + path, true ); req.setRequestHeader('authorization', auth_header()); + req.setRequestHeader('x-vhost', current_vhost); req.onreadystatechange = function () { if (req.readyState == 4) { var ix = jQuery.inArray(req, outstanding_reqs); @@ -966,7 +1087,29 @@ function check_bad_response(req, full_page_404) { else if (req.status >= 400 && req.status <= 404) { var reason = JSON.parse(req.responseText).reason; if (typeof(reason) != 'string') reason = JSON.stringify(reason); - show_popup('warn', reason); + + var error = JSON.parse(req.responseText).error; + if (typeof(error) != 'string') error = JSON.stringify(error); + + if (error == 'bad_request' || error == 'not_found') { + show_popup('warn', reason); + } else if (error == 'page_out_of_range') { + var seconds = 60; + if (last_page_out_of_range_error > 0) + seconds = (new Date().getTime() - last_page_out_of_range_error.getTime())/1000; + if (seconds > 3) { + Sammy.log('server reports page is out of range, redirecting to page 1'); + var contexts = ["queues", "exchanges", "connections", "channels"]; + var matches = /api\/(.*)\?/.exec(req.responseURL); + if (matches != null && matches.length > 1) { + 
contexts.forEach(function(item) { + if (matches[1].indexOf(item) == 0) {update_pages(item, 1)}; + }); + } else update_pages(current_template, 1); + + last_page_out_of_range_error = new Date(); + } + } } else if (req.status == 408) { update_status('timeout'); @@ -1178,7 +1321,7 @@ function keys(obj) { return ks; } -// Don't use the jQuery AJAX support, it seemss to have trouble reporting +// Don't use the jQuery AJAX support, it seems to have trouble reporting // server-down type errors. function xmlHttpRequest() { var res; diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/prefs.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/prefs.js similarity index 100% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/prefs.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/prefs.js diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.js b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/sammy.js old mode 100644 new mode 100755 similarity index 62% rename from rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.js rename to rabbitmq-server/deps/rabbitmq_management/priv/www/js/sammy.js index 4fcd72b..b0ff981 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.js +++ b/rabbitmq-server/deps/rabbitmq_management/priv/www/js/sammy.js @@ -1,25 +1,41 @@ // name: sammy -// version: 0.6.0pre - -(function($) { +// version: 0.7.6 + +// Sammy.js / http://sammyjs.org + +(function(factory){ + // Support module loading scenarios + if (typeof define === 'function' && define.amd){ + // AMD Anonymous Module + define(['jquery'], factory); + } else { + // No module loader (plain - + + +

SockJS-erlang Echo example

+
+ +
+
+ + diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/cowboy_multiplex.erl b/rabbitmq-server/deps/sockjs/examples/multiplex/cowboy_multiplex.erl similarity index 58% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/cowboy_multiplex.erl rename to rabbitmq-server/deps/sockjs/examples/multiplex/cowboy_multiplex.erl index 087374b..e0b8b42 100755 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/cowboy_multiplex.erl +++ b/rabbitmq-server/deps/sockjs/examples/multiplex/cowboy_multiplex.erl @@ -1,17 +1,21 @@ #!/usr/bin/env escript -%%! -smp disable +A1 +K true -pa ebin deps/cowboy/ebin -input +%%! -smp disable +A1 +K true -pa ebin -env ERL_LIBS deps -input -module(cowboy_multiplex). -mode(compile). -export([main/1]). %% Cowboy callbacks --export([init/3, handle/2, terminate/2]). +-export([init/3, handle/2, terminate/3]). main(_) -> Port = 8081, - application:start(sockjs), - application:start(cowboy), + ok = application:start(xmerl), + ok = application:start(sockjs), + ok = application:start(ranch), + ok = application:start(crypto), + ok = application:start(cowlib), + ok = application:start(cowboy), MultiplexState = sockjs_multiplex:init_state( [{"ann", fun service_ann/3, []}, @@ -21,14 +25,15 @@ main(_) -> SockjsState = sockjs_handler:init_state( <<"/multiplex">>, sockjs_multiplex, MultiplexState, []), - VhostRoutes = [{[<<"multiplex">>, '...'], sockjs_cowboy_handler, SockjsState}, + VhostRoutes = [{<<"/multiplex/[...]">>, sockjs_cowboy_handler, SockjsState}, {'_', ?MODULE, []}], Routes = [{'_', VhostRoutes}], % any vhost + Dispatch = cowboy_router:compile(Routes), io:format(" [*] Running at http://localhost:~p~n", [Port]), - cowboy:start_listener(http, 100, - cowboy_tcp_transport, [{port, Port}], - cowboy_http_protocol, [{dispatch, Routes}]), + cowboy:start_http(http, 100, + [{port, Port}], + [{env, [{dispatch, Dispatch}]}]), 
receive _ -> ok end. @@ -39,48 +44,44 @@ init({_Any, http}, Req, []) -> {ok, Req, []}. handle(Req, State) -> - {Path, Req1} = cowboy_http_req:path(Req), + {Path, Req1} = cowboy_req:path(Req), {ok, Req2} = case Path of - [<<"multiplex.js">>] -> - {ok, Data} = file:read_file("./examples/multiplex/multiplex.js"), - cowboy_http_req:reply(200, [{<<"Content-Type">>, "application/javascript"}], - Data, Req1); - [] -> + <<"/">> -> {ok, Data} = file:read_file("./examples/multiplex/index.html"), - cowboy_http_req:reply(200, [{<<"Content-Type">>, "text/html"}], + cowboy_req:reply(200, [{<<"Content-Type">>, "text/html"}], Data, Req1); _ -> - cowboy_http_req:reply(404, [], + cowboy_req:reply(404, [], <<"404 - Nothing here\n">>, Req1) end, {ok, Req2, State}. -terminate(_Req, _State) -> +terminate(_Reason, _Req, _State) -> ok. %% -------------------------------------------------------------------------- service_ann(Conn, init, State) -> - Conn:send("Ann says hi!"), + sockjs:send("Ann says hi!", Conn), {ok, State}; service_ann(Conn, {recv, Data}, State) -> - Conn:send(["Ann nods: ", Data]), + sockjs:send(["Ann nods: ", Data], Conn), {ok, State}; service_ann(_Conn, closed, State) -> {ok, State}. service_bob(Conn, init, State) -> - Conn:send("Bob doesn't agree."), + sockjs:send("Bob doesn't agree.", Conn), {ok, State}; service_bob(Conn, {recv, Data}, State) -> - Conn:send(["Bob says no to: ", Data]), + sockjs:send(["Bob says no to: ", Data], Conn), {ok, State}; service_bob(_Conn, closed, State) -> {ok, State}. service_carl(Conn, init, State) -> - Conn:send("Carl says goodbye!"), - Conn:close(), + sockjs:send("Carl says goodbye!", Conn), + sockjs:close(Conn), {ok, State}; service_carl(_Conn, _, State) -> {ok, State}. 
diff --git a/rabbitmq-server/deps/sockjs/examples/multiplex/cowboy_multiplex_authen_callback.erl b/rabbitmq-server/deps/sockjs/examples/multiplex/cowboy_multiplex_authen_callback.erl new file mode 100755 index 0000000..625a605 --- /dev/null +++ b/rabbitmq-server/deps/sockjs/examples/multiplex/cowboy_multiplex_authen_callback.erl @@ -0,0 +1,107 @@ +#!/usr/bin/env escript +%%! -smp disable +A1 +K true -pa ebin -env ERL_LIBS deps -input +-module(cowboy_multiplex). +-mode(compile). + +-export([main/1]). + +%% Cowboy callbacks +-export([init/3, handle/2, terminate/3]). + +main(_) -> + Port = 8081, + ok = application:start(xmerl), + ok = application:start(sockjs), + ok = application:start(ranch), + ok = application:start(crypto), + ok = application:start(cowlib), + ok = application:start(cowboy), + + MultiplexState = sockjs_multiplex:init_state( + [{"ann", fun service_ann/3, []}, + {"bob", fun service_bob/3, []}, + {"carl", fun service_carl/3, []}], + {fun authen/3, [{state, []}]}), + + SockjsState = sockjs_handler:init_state( + <<"/multiplex">>, sockjs_multiplex, MultiplexState, []), + + VhostRoutes = [{<<"/multiplex/[...]">>, sockjs_cowboy_handler, SockjsState}, + {'_', ?MODULE, []}], + Routes = [{'_', VhostRoutes}], % any vhost + Dispatch = cowboy_router:compile(Routes), + + io:format(" [*] Running at http://localhost:~p~n", [Port]), + cowboy:start_http(http, 100, + [{port, Port}], + [{env, [{dispatch, Dispatch}]}]), + receive + _ -> ok + end. + +%% -------------------------------------------------------------------------- + +init({_Any, http}, Req, []) -> + {ok, Req, []}. + +handle(Req, State) -> + {Path, Req1} = cowboy_req:path(Req), + {ok, Req2} = case Path of + <<"/">> -> + {ok, Data} = file:read_file("./examples/multiplex/index_authen_callback.html"), + cowboy_req:reply(200, [{<<"Content-Type">>, "text/html"}], + Data, Req1); + _ -> + cowboy_req:reply(404, [], + <<"404 - Nothing here\n">>, Req1) + end, + {ok, Req2, State}. 
+ +terminate(_Reason, _Req, _State) -> + ok. + +%% -------------------------------------------------------------------------- + +authen(Conn, init, Extra) -> + {ok, TRef} = timer:apply_after(5000, sockjs, close, [Conn]), + {ok, [TRef | Extra]}; +authen(Conn, {recv, Data}, [TRef | Extra] = State) -> + case Data of + <<"auth">> -> + sockjs:send(<<"Authenticate successfully!">>, Conn), + timer:cancel(TRef), + {success, [{user_id, element(3, erlang:now())} | Extra]}; + _Else -> + {ok, State} + end; +authen(_Conn, closed, [TRef | Extra]) -> + timer:cancel(TRef), + {ok, Extra}. + +service_ann(Conn, init, State) -> + sockjs:send("Ann says hi!", Conn), + {ok, State}; +service_ann(Conn, {recv, Data}, State) -> + {user_id, UserId} = lists:keyfind(user_id, 1, State), + sockjs:send(["Ann nods: ", Data, " from ", erlang:integer_to_binary(UserId)], Conn), + {ok, State}; +service_ann(_Conn, closed, State) -> + {ok, State}. + +service_bob(Conn, init, State) -> + sockjs:send("Bob doesn't agree.", Conn), + {ok, State}; +service_bob(Conn, {recv, Data}, State) -> + {user_id, UserId} = lists:keyfind(user_id, 1, State), + sockjs:send(["Bob says no to: ", Data, " from ", erlang:integer_to_binary(UserId)], + Conn), + {ok, State}; +service_bob(_Conn, closed, State) -> + {ok, State}. + +service_carl(Conn, init, State) -> + sockjs:send("Carl says goodbye!", Conn), + sockjs:close(Conn), + {ok, State}; +service_carl(_Conn, _, State) -> + {ok, State}. 
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/index.html b/rabbitmq-server/deps/sockjs/examples/multiplex/index.html similarity index 92% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/index.html rename to rabbitmq-server/deps/sockjs/examples/multiplex/index.html index 5efe2fc..3353e6f 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/examples/multiplex/index.html +++ b/rabbitmq-server/deps/sockjs/examples/multiplex/index.html @@ -1,8 +1,8 @@ - - + + + +

SockJS Multiplex example

+ +
+
+
+
+ +
+
+
+
+ +
+
+
+
+ +
+
+
+
+ + + diff --git a/rabbitmq-server/deps/sockjs/rebar b/rabbitmq-server/deps/sockjs/rebar new file mode 100755 index 0000000000000000000000000000000000000000..b2bb16ee3571bbd93a596c2d34b3b3cb319a3670 GIT binary patch literal 175491 zcmZ^qV~{XBldi|MZ9Ze$wr$(C?U`q6+qP}nwypDi`yJHo*-fRA{J1NfA6?a*TpeOU zIu|ELIztOvIul!00uv`AM+eZe{@jtQotZ60RL)d zUQxpTvH0Hu*gx~Xh5xe#`u}SFwHe+25kmaOW?z)osTDu~04}fq0RJ-a|E&J|0JH}7 z_Q|T7w%CLSKCfzd;;Kaxu^SX}u@8#HeDcpUk*ZgWmw9#kZS3_`UtZw1WUiKA+IZdu zGnk(-;yDh-DMSl#+?nm1@nFLlU!T;$j6KKESHwQIEruS}ApYxqV> z!(4&uwui`P9~U1NpUgoF!l1tWc)UKon4jaJtsS=+Gcc-u^5|Ds8N`nzoSOblU*erMnhDFI>nzPZE;fsqa%5Q?I~zuE;6{ z^o?8|WOY;k!Nw7#ER zT`Ddis1J{68n?M&$s?!Vf?Yxiix+})vnu41I*9YBd)mf^Uc*ic_f0EF_QiP8;tE#j zE{_f45ir>26VDS*MPL2XdiNZD1$Tz!vmMmyShg^ZPPwLaZ4+m(WXFaB5%9 z1@iGVw*V)OXCc3BuUD;$k0Z4g-JsiKX@6$e%$RCb2bI@P)HUMr( z{g3xIUzShk|LoiFP;q5;SO5UdBme-Y|DA6eni$vwt3kRWtE|pACz@h$I%gC$g(wOt zbs%N3mOJOuZ$r!}LYbh8Cpm9~mPm||=pYnP3$9y3t6~&u9trV-2FSUJ@UxmEh#;VX zLWSq4wh&RAx!oB1`~Lji=AO2vd(HB^X1(^DW_g)`!Q!V=Ac<@=I5LHqbl@fvC6K6+ zAgM@cN3S0tsby5X(0Z?$GjeI$;){jQKebh}uJi`ir(32Y8MTj}ywr1gOX1c?CSkjI zTuI+Od^(5u2V-{)BSxqr8fAY2~fTMTRCF+;HA>=S~O~0 zapui!AdA98J!fsqH5CPdErXk-2TiSd@jAZFHffT^ zK$|u!3TO;1)TH0mqZPHbQK$X59zWZcNf%Vgoth>2@mbg-GGHB-H<-iQg)`efZ3EjK|eUSwuoWJ;Ys z95|zg>z}j@f3to1lnrSU$(Ahzo2CsZ+!0Bo>h%4j6nDX>+(qE5l);<#Y=WYOhEgz! 
z+mo28q)- z_*gx%=)!d)tJZ-KsUlxGanETn%Wx3ii=e!;q62~(orOPsI)6iS{@{}0m01Y800?ea z;8ebYpFu`oWs*8Jpyg=F@RyI;?n|_;N#mtJu726wONlS|=a5M}#w|wta#t^dSOh}> zjWa=zBnr+}B2z@?Ob-gAUsD(e7XUzYl;z`t+i)~NL?2M#dw_(Le^FZcv!^RXl(VF{ z=BZEaowFlLlOLPLjF@vGoHl`w7SsuH6!%3^q9#XuQkVxpoSK(M3ObtO@~B+#iC zxcvMRQj#n;uMbo`KtjbV9l&bmoGYkX`3uL5tX8JPiINVb(^4XoT5}0O z`$DyQVa9W*<^sFMpWRB8Oi z9gA574Gy~lWRZcnCU2PgMT_401+HBySW?Qs>lxQl4>8?8JNkBvRi2qhFvJN&C9j}P zO1uOV)-Nv|;wccazmbH9Q7m}-qN3QGJCXQk2NEERAVgT1(DbEyfa^p?W?+g6dLnly+&*I1r@TU3#e12Y^ErCQnFMz{@)7{~$`79pru z*`Ph`Z)yZZ#)Qg8nG^AHBtNMmDv=7R)^OLbeEArroGY+(sG$0ij^y%iQa`tHyJ`U9 z1EryUhU!<)AMyrDVh<~N@1=uuF2-b6!X%|u41Ql~X z25O%NN-Tycu+P(HftabTa|^d|DiWz4!YS*On@qsgR%x$1SOXR?TfeKhynBn}) zq7kWZ5`dx2IP>@|sc^c;x$)`!orqNOO<>|G!g0!q@SjV3o24mBR12s@c#2i{>3>Bo z=!qczF69-0v29Q{i>DTsG&nW-+-c+10J9va0Se3l>;eNiV~EY|kptqk3X}QmAaQEO z6f&jH1sMtRGm04lb^!A8X5oxSGMoSLtNtTJsAxt>@t3_WSe4>kaW=FF8%?gH;2;zO z9%PUMpqJd4i`1$n-l0nt;S2^S+x`c@v%v7IDiQQgkO!)UDtR_xwo8M6S(zGnXOb*l zE%Bs65R%wYR5J;VHfIwNV%pfS1#TW5m}K|CxUV}Ajs&JLl;~3s$4=xcGtHf<8Wik1 zG$@)zZ6G%m+z44|q6I=KGcT=SICLI`FdIm5F<3bYSUq6bJyHhLRUVdPQ1V&&3?i}r zngGAG-@a=SW)~(0(zTG3X+9GPbap?ceKgEaI7}=>V3FGWW{?i-eab4uP+bm=e-M4p z)KEAe4g*$XIUBdBI0cblbWqkvctCgpgFaxzT&2V!)L8Vsz7!l6Fl(3DsCo_y13fc{ zIyShAc>fEN1t=Y$NGwKNus;v`G5W*Q)P(pqh|sfFw#?^1^o%9AN1Akx@rsP<0$w~*rr98n9ffTE|OhbAE1F&^y zFm5#i0?&hXFd`hW-|ja*)Zg9A-?+Wsu!M0uMqK9epm+P!=yeZwX{BC7P+#4EQ*yg{5flW4P`Hou_~pJOhVw~DC=~8r7UPJ^dwf5BIf$e<3c#PNz{4==Q6mDX z6Nt?Ojr`&hier>*y^#|`4!5Z6u(4QLP#QCKx^kND*lPRL`{?gR5IP-gtL_{Wdvdh> z=YF}(fxQ?%xG8CXe_urCoY}Q;?Opq%sr7v!YeZr5(hA6>5bK~~(;)jW5Z3pG!@-ro z*QWtY%1Vh&^R!gXc@|9r=1-f7fK94`X4fWI7h(~U&$`9W4`~3o`VP6^)cqNk#}OO* z#W3ginirS(z@tgcCD<5GM8S)ouj(=OJwH_BL8;Vt{%YlEm>zqagtzzckfekW>w+yR z@z+n}9>`#av$w5Axcl?L59Eg1LNPzd0yJ~3jd|a~cr72$M+B%7+L#g!tAU8*fF!=?Qsifc-ajT+2I-=h3cc%#M&6p1AUxf- z&jvOnt-xUM%P&y74?*ZF*KEM+uNK3Z1s$R@8rmwwnFXAs^Ah_C84K%jlAT^mdPKZ> zmxB^!MWtT_g!hat14pMAg5FC5rl$qyMc@r`q&Gyw@h314Xuu#jq&I}bi6L+ZIzs(X 
ziZJ(oWsi5)cZ#7jMuwx4E&IcNLGxN_=k~R!YbGz_*#H=z3AIb14@f$}sP|UUAFkG} zMNkHoG);MAB`341oZ<#t{^XtT>|MSyzn%H z_kd93E)$9dHbedF75zx=E($cOi9qaoO7@#Kn;stR3O*!-Tz@Q#wJK>ENC14;q=-Q5 zEAG5$)FE&hvcSwC3&f^jPK-4s`wAV}=V+DWDZw`_FV39;@k6|{NvXFhY1(&S1InBg z5IB*G^WE`FW|zDb?3mCF-=G<~qcNEGV>*~sHNKDs73u-U{5%k6I2z@MBF8r{m;6(V zlQ)R_g}g^zyCS&VWPmf_?V87~Z;%E?4{?Pjp)t_AzV0Ssi*yVf%IoC#U6t*P$W>s2 zu0~^>@rGR0gWaAWyP2|hep%IfR6^8dY1p^ba^0r6%_2+gB5-whIF1vl-m$&;^_-Uhj^_K3# z@qI9(rClP(6QpLnw?-n@=4xJ`mKqW~(8_T9$iq%UaVD3ekr$t@*?93i?^y+L(Bb5p z$wJvm)q7#SSS+P>#^N2_r?!SndHHE(^{74uv!iKP%E(jKn|$*}btx#WnM-Kv)H_WA z-8D3I<=fp^8D;a(Dr!31=QrlQb~6q&3OC}@3EOWT7uF+Qv_^KwxPb4r)aq_1&?~5L zhnG%{aw?jt8&TzJLD^GhsGR5K09FdWiq^7|kj@O8ir?!st+`3>v(VLS z=^puAfvflZy_FBfrZW2YQ4(8tBCiM3t2XDl#xA$#cRnqd%tiR&ZS&HO^$YR-IKTef z$#r&{Q_xr6`|&)*R$=gQWPJk5&)c^DDX$KeldM>ij|NA~9I3Q_G8P^4&PfgP3a8*JLj55D|LDJC&)!R7Qut|2%5) z5npyr@5-X1MEb*Xqv0s&$t&(_GINzx1qJ`-ZGkS6dbMQwV=MHx-D>S=PE1QoTh`ON z`S$i^jhm9?o7v09uFmtC>(HlX)ANtB>r}_Z)Fs_l&fSQN-c3EYo@(pMP%8QELwE1N z*2tLBF(Joo)z?QM>hwO>$+oRe-FI8r=vy&tB_H>1{|3jR=zKHy=?IQBgX|baw#(jl+!D$BS5}DOgv3#Ch51p)SWJhU@+h9Vu<^?IkVE zchTj$h!_~xCAhEq8S-R=+OsEzfn&+p~8U&h;u=JJHf3>#jwU34ZTOuld^Q?=#gy z`rh>SzmG_f>2jeOm|o@26Bn0JxcAW-C!W_3QNQ*} z@!p_R(rPJDF&NHo+wDyyy&E2O*V_|b{IJ{|!D+ubW7o-;x?U{Z$T9rHJ>v1^8XU$K z0iK>$`+;OCzhYhlJ#OZ$MbEs~Ly{^l^~p)StqD5WrsgWnN2{sUT!*M*q1{ca)1|47 z-{F~1E7WFFj@L-xm8SM`h(`~0%W0}861|~WOg#3JsNGI9I->zUiIWwWY;H>Z(^7`> zW}`;SP8Ku}Evvx{S>aa>`(yExSeNb{620S%OS+Q4o$YrMq05xj*_~}Bqu(RoWb0@6 z-surqyh5#Gtz((6Z~VWaFjjKCwj4}3H|}b3eq4M`efJ&{5h|M-3_; zU^r`(n}2(SiQRUz*S$M4y)T8bE2xE8kC!X*gMj`FD)Ah{G7x6G7Zls%y46*#Cb~kXj z4v5vJ=GH3Lu4;F8RF;}beE(b|^;-bm!l~ykYj}O#eWdg4UMTi(P@L_wePWY0M{6C+ za%a9zbRHA?K5sidiWW9su1q0r)*i|AU^SY2v)R=~1$^D#evxD35kiaON8e*w|;YV2&)DSb|l`Ar_4WX)R5a6NBE?Qg1evH9r# zbiX+bPvhh0c;3Y11^d4xTx!}a`DXN*7ci8Ww7&$*xSceuhGFy4;dCC!o*BJIdK6Nj zxS!b}2mYuqzut$qfp1YpwtEo-Meg{pe$JiT-Q&yF(5gRmQ5tOb znNA)%s1XUlFusoh z7_Gk*!PB~8;yBj6PcqbaZf;DXd407ZH=^EmxA0{db~Bvbi51S6e!d{%KEFFH5FS2X 
z3gm~?B*(2SGZ+E+PPW}G=6l2d!@Dej=>9l+mHEBTZWnpQme=a=bP!uxF@7&xKr5KA zoZ8gJz7J>&nTy&QQQsB^zp(o8`WgHbch`dVvc5AtC zd924K)Nj|$cic~0Dn!kAdk~3h?Dn!399$5ka=Xj+V%r#hu7#0ze8@i7`6;(MPSn8e zt{rzGRfmzuYVYj9>=^wV0k*q8emqchrmQdaaAjyEoIf#Vjq_=^=(sAD~SUNw};ZIK6ror;@{3*rM%z@0HTI)-nAM|MM^o z@cXNhXl0F}f--vBvU&4kg=I;VzY>Zyzb3Td<3dA_EUraiq^Wsk+h3GFbu)D{W9w#` zovl-mg0LtI+*}kW5Bc{l{LNh28w6BoT2w;%4FLZm00WqH|Po~-xC z@Aq_z>2$08R_02_>+nk9b>RA9ew}JnC*)`<0R2>DJ=^YQe;)EWll(RNv&G`#$n8)} zN>;C&r`M~a_k3x;*6ZT}FA-{ojo1z+)2dV6)r#nGeW8*?Psb*Sm)*s5uzz6C4t|EF zQ~UWRa^>@ci;g0ykP96@9ly=vxsHwpE_B$o1tS5`C&_pm4gZ=WQj#cDLfH?%PFV=$|1 z&Xt?Ua^qe=meLIT)M9^ASN-fdD}L-8eWPZV_@!lcc%8!A>%>K{)iF@8_Q+irJ|;^u zADex0sP%#a!FSkmvcb#_r?JzGL=r_cWAu#SJc`C+Ur_}1#1e4J-)o{z+4 zXD!oE>M-M3zYqEDEhDHh)J)8c9&3|3>@ zv)XyYsseHS$#nWm=U8ES|L8-!H2zO&XfYw@&?xPM= zYd#KP0Hcqi8pVJM$4gc-&-eiR~SkiyjnUbhcJcYyuH2npOCLWw=lC~d?ct4|_p zxfLAR7DAPSrM0} z6iid#S-2@NgMyz1rKovH^m<7oQ@o(GER0o&E`&mQ)12$NXz&ROH~-K%aAuJj^~~ub zkal6zIl{LDv_qzM?)C|LJ7J<2$|6FUben2fym{eUvAapzv3}xL=Vi?s!dIpBJrbc>3b~g4F)+Ua${{mJ%sy^1prpGcW4R3JMf?7ET=fI!ZfWlk-#_rovQ?OdmYoAMynqQ7gBl?yMdoF*4NMGcgK z%9~J|g0I!pmQd#_e)`Jt78_|JX)XDZpYAu_CdgIj^t0uUYz>-&ctSC`rIfonXXMu!x*uq1&21UMp)>Tt%n(Le3C0yC+{w6u4xgJMA=+n`fJuynFn~jV zmph|g7tum(8HiCRV7Z0NMy6P3PWJ7#qS}>J#F0}3vdP9m1+kpcybHduUjn%o9>d+y z#6cCQFr@Ji42b8SUWEqQ$3!rqVcJ@S7~GT`XlkB{6wZ^?SPfVmQ-D6O@Wl4%@bLYu z?5>6TB6(zO7FQ&iR+1tCRZ*NA0`6U{-sHCvT)OrQ7SBs>QJkcD0sZ4Jxo1@;_Mw@%V4V{cySfKtl9g}Xfoau}j<5E3LoU5hVBLX0DT8g*Dbf5#?vwpv+30i zs9(AdXt1gaYz`=YlhsV?$HQ-bZOR{JWe4UB(g$z@?%aMG&eQrSBL8E~MhJ<#T(*p`d0*^|2Q)yLFAFAiXC~nKx=S+tpNScN-^M^O1Z! 
z$wdbnWq!A{KPBi`U)oL1;aMr;5xh z)EoKhAjQi2QQPSA$(Y?a+vE84eVMt0E9OQH-{bZ0JUYYe>pnYiKO1Y)<8U>+xZCD@ z*L?hJR?@p=@$G(6*+aM8UGa0Z&L)LkyYl<=v+hP~+xs+qYwkrPUSKlVtRJZ_m|^%F&v*{^R>~Fm&}O$NjYJ_Rpp7Gu-th(eAngx)46M%TAKR z>ntqdYae^GMC<2E?%LwE8A$V8xR>DPX6U_SlIp8R_K?ZU_V=N6L@VL8ck4assMTAC z{6`d*Z5O-O<>z{G*si9MwzsY8qB~ip$YoEiH=WDX$@M#d&im%lZaTYj{$=Tw&i$RG zW7p5-@Vb3wZ2_GQel`#H_G;_xz0>_B#O~5ucyfaWAMov`^3DtGsz*r0_2dAJEcG*E0j^{ZQ?F7o!$LPds!ehDfab{_0uP!63 z-UcqW{{3e|!*~tQ4-PcD_q$1fCY97@hU{%mI2gD-9D}2<%r#gsa;&y&U6FeJ!xZg^2q&XnXCqp3p(qQHX)-y@sc#sD z2m4bGA5P&7DNXPjbFc~B9KT+V{YD^ zlVh|i#(^`%5l5-d0)P&FoshoOAUv)HD;D%9n+Q@EB*F#;EUGDD@~EJ}#! zRs+62g5Rh9xWGyx!wTO0Jb`7+g36E3f=P);!rSwN72)1dC6rmgiuq0aI5%o2preYC zl2k+KLG_JiYE22B;~)xy<5318tES9}J&CG88GmL->5xQ_^;<+YieKWDEa(XeWwW5i z>@hK?IEAihLx+uH?py+GhfN+Zzd9xsxxFQj->8P#FHSju^eNF12t54T!S*)5_8Oc9 z(Y@6WYg_(6$Z{A8-tx)DpqkpxAa$JFk@YwCa9ZiCO3f7~@BWmvaz0R!jqLLFNTTG&Qj=ul7&wii0sj~8nFOs;(PlZBxz=LV8=6XbN1n8(?OF}$}98PEorv8~Vp zHrdXD9KNwXgG1=V7UWW(2CU~Iy{0R4gy(=L5wJ1OCK3<{yGcZOmsj4A0m6#j1nk5n z3M+h|jjl>^z;t)Le}Y}k&(Q1KpTip~t?vTa+{yRKgCC@|WGjd#ygC^0&)n$~0bnCg zoW67{+cClI*aH5D4ZP!l+lC$(LoG>4mkzLLS_&I`faxLmc1 zoj_DyNlWr{hLsEdj6wSeqkY$8SvWv4QLTI zpt~Kl!fst4a-fU;_PuVQ9Ua3+Fv|Xl#L-EG4`;r(iKI{|19F0ED+H#u3{!w1Beduh zGUCth@5ZoOcE(G|TZX%SwPMCdb}>&Kxc3!OB&>2nrZ}i`*(2e5lJ4xM&k^k2W^Joa0X}t3$SYIH(6?j zBU%;E-WcoP{MSUo3(5qUUUeD8+b%P0>7FB zPROAxhvxt?76VCi%q@;%pqg(VlwKxgFhb(GEo0s!RHRhbNyr43evZf?vezVy@E7-m zs;RN(bQ}3d7x+$E+@n=LLvlh1B$mk@7jO_!Up#J1j19x{#&GV=a&yX_nQ|5(d1`Z6 z>8*}SwW?1c+J6&Nw2$PLV+~{lhAgnZQ7$isE~wd~jqJAl9M}pR+LL!gDDyyzZ8(z$ z-^#@{^7ihHp=Xlo#sB)AzFXf%7w6lH`xijn8o6ZmFe2zYhYuj-jy_z@b|B83yX-MO z%u!M1 z`u@v56}>y>vrDt*&;$GV3r|sxP*YR>ysn!<% z>%KMT?0dl}BvselTgBsXsQvZFv*$}v&F!F>?(6Zq^~J<(#_#khc@+KkxsUwT=OS)4 z8hi$a9n*c?Jo8)PaWW{b1_kf^I%&Zp*>?AHw;DeCBKHjQ{n5ndB_yLvZEjuxvWKSkqO?8bdE)|WC?bxtX_-iWF%_lF|7l(i=A17LsW_?<@ zFBIVPaR%%zfK{L|?XeUF;-gg{pWXzTB?l9^b=P+;S^VU2aNp7QXsjGv7N;vwGnqB! 
z9I^yYx;XV2v8W)vrT-bQ(j)Y!shMmIq?X!43+o);TA8k+f8(sjFGWwu$<&CW}=K zWa0hdaRFwlT&C4sIZ`fdF3rAMBx-9^a@(f_QW#y?n-0Fm_gzBs9|&Zrl$gvn_9}EZ zIcV%&joYjU0W_m;X){RHJ{Z<1(KQOLfs!?a7YEnC=>`s0y3SmlG2VVWQ}}w)6cZ2A z1`}0dQdJWc*BqV!-f7yp*#^1>y2jD9)wM?JY@bx0OrJQPT%OsQM*If!b*U?}E0q^4 zFP*b5i1uIb|NM4gD21St_)h{yjSc{S^FJhUF82S<;C$Gj?Ke+6@&FQkhZet&qA(k0J|&g+?m|6r@d}i14V_mOEL{B-5r+nfPpXX(~52<%4B6`jnTyD-VwyKX-Uj zRu?fA8?v(Pa@S%}5?3dPaa5O!5Emacv0j#yn>iIn%7QF6)eLmk`!}?AX-cCjpGZ@x zRZ;!@E2%3(X{q6+UAMfDmzPWds8Ogq;a5bgHkIN%Gpr-rT5-HM?BPkztZru2;QgG^ ztjLk9AiH#If|} zLfZsybK>o=j{hLe1j?GzNp(PKRrG+k28a0p;$&s<-NW4wtV%BQ@qbeWCb<$U$X#t*eP>QUIHYVug zgMwHIqS=y1RF}t5N}1y1#&bhal`hZXDoZn`cyqVnDgZ5=lA z4TZ-NSfR;j6&Mwu90qzH!UmDB^?)NDRYbmSp^q(;0cA7Tu6JQ zyyf9^Z5gGoJtVA&KxwLyJ;bseypG;5v;$@q>{1S0Rr7Rw2GY2`ac30SlB<1yQzU2D z-0+7cj_ZAr^w_bud&d~@HR4E*4dQXe=}F=g`}IMYSJJZ|5E&2wrrbTNAz|g<%`%Vq z#M5JntgjQ)IQx!Uifz9o$9`~H`UXMTX;jC;h3^U%K!EcaDLyRqlmh z{*V{eQ{iT=qJ%#WanSxCJ=^Ui~dyoL?D|T zGFnp7ovuSfRp}uH9IwDObI5#AQT^3P-vsl(wti`6V~rE(rJ*rvY;uJ{pzJ4B@^rFT zH;EE@(N5z=nB2t^)HiOMnm)7jF0fqmY3w_X;V#fz{qS0ghZ=P4RiN4Vos>Q&?=bDb zNr`mU1UtpHlMaId)4er3k9};RU9-xDY?al*=Yk=Y`=@?1pgYFqZuA@|;x+m-d1bL#L)Q`~-uJL14#CziWGFi~~WE+qdZh#k+ zrTpfpVZ=i^;X=F1QccefqSyhC`184agRq7 zd8#+e(O!-rvLv zHp`1=fW->Gza+;0vsu2P0oo?Ip&HTZ;}&3%Al$Og>ksC`)YGmjE1uXcmf z6`F^?QA^YKR#){7u0cyjysR{DRlu8%1cGQy4l5>!osoFQM?xHLTUfx!M&~%mhPOS# z?iWihEFv_VXFOyGU{k>Bn72jf3LF88gpyX#IABs@NZ`*Q!d(jQ3jFi4)@KI!CuiS> zhaFSAzyuu>!9i3M&R?7gPM8rlH5Ct#8Nvp=@0C}mB~GB2ggf!F>?_@ojc(q=+Uq?8 z?O_SomUMFzZ=6tpbvz@tJE5;TY5#4GE8K_%F0;IrLL`u6f82fC`qUw~wv+A!|1sAV zh#dvjROoA%y6ghCr4Uz##fP#h2oi9nKs6C@$}t1y z2w5$X)@``^~B5|*yl!J z`8r`D`MQbuM&GKVfhXa!x-#RPhv?GGB4}uAHn8z@<0S5NME)-Z9D<$u6Crun$V0jZ zHNnDH!OgR+*F5=?Cl4T zo$U*HD7uH2Me=GK4>~8q zFRDDH|G}44p1&n`Kj1E}Jti-|{99PK+E00E+fA%?%1wh&jS_K1CU#Qv$bB%TkaHyW z<4t51|3eA*`SxB4y%+|!V6c0-Dc5CxfS;0rzy5krihs_D&;5G?t<{4o7aVKtr%39oY5elP|jV>%x(a_r39H z)s;CFzuVz48G6^dz40#J@xs)Z?EVJ+*ZOrh^Sc*cepc^yzWTNHcklcZ-}gjn;CkqL 
zV}Y*5*Zxc^wKgY5`=jp@KeOe_Yjd~zEpzPW*W+ikz2uOu^W$*bttXc!IfnORewQV?^ydWjqL~X@ zPWw&NKZC+V+{Sl*YC1^PM*JJ!-qYfx)zURLC@F{YW{DlreyU$$7uUu8h3f%i7utSp zpRk*Ie@+O%2(AZPf9<^f8UQ#Jz96Xf#c6Ec!pYb&YiINL?4IKxcn4(9?J54dzQ2E+ zaByy~-HB4J2+a~DlD87QS}37#X?6D(=<&r3t+w^o^ftQ?zsKuj>du`3+6^%(m+&h` zG5w3X5IImRU;Xkm{jV-1%EAWdz#gp%3~4EiVl-ze6;dQstGI{buyjvB8C3HN_9hUl zTcn`Jq1Y(7sAA(`7*f2g6WTeP|;_V+~PFC(DtX!OI&7zD`r_j0WJ|>v7*9b{4K{`U2wpoi-H6&{_4IToo2vZM`DPLLDC+1wo@|Z2=woSpjOh z(sXAXtm4f@TPm1t<$1;nqGfDN3e=_e`aIf`$jAJIg?6I%YiW4Q5XZ5PdZr?)bopoa zp?_*Y_=I4m&%x54A;VGeb_-X2cu${_DELM*TwcMr!x4-)`^S?qBa{ zzGmNGzv$lL?wQ`)>Ox&6UL61x|La{g@|_2+@gELv{124)S3%5w{RORO;AG42UopI& zHMSbA->KWnHixU%3x~`i^LaJgwYlkMR+_7YfR{vy=lX+{6_m2gNtVy-3+cZ=eCaj5 z5|`xa4Y)iYHV>qQhDQ1Xh;fK2OmjiT8uMF_h!R@|YD`0fB1!xo8Dt2HKCjft@S!1z zT(8~Nci-JtpPt>$$H$w9um%7KaIi4&5ip!aIDki7fIimo0g>fDWi1Ibf2TZ!Y7L?u zVQZvqS-QvYm#j1;nwk09$~NAT^vzjf^vgq4gtT84ME(g3t5;~St93!p>NG0ACpeYZ zCP~!?nnHQ10h)l@Fv&8NHJpfi`Q)V0u$>nBfhAhp(`IEZQ!^PIgzkdVp+;gUZdB=_ z8nt~;u7RNzXl6ppc}F1iCh4k^j?v^~WoR;!`{k|~G{MywmdZdpJOTL^Eqq|l}ElR+>QP=Os}nuzKWM?sLuTOg0aDPtAsW35uYmJ^1wGX-Ad>G>Y8Hqr1uu-U!a9-CA@q)DNQe=-~$#q06jsOVB$ zkfEc}(TH&YILAOKO6Q%)SX zzJbNVNlwQ$7IlhZW=^rUb7GwDplI(J8;6ESq%k>@A&VeOjdM6CK$7Xxq0cZEGI%e% z!&OLVmkFK+Avf83aFFd;K(^7jL7*#OqNhbBKB0}*y?1a+fJ^Lj=EF1>S zfOIaXXz{BE86Upu~~j%OSy7AIydmY=sKq^&BAR- zr)?XRwr$(CZQHE0Rf$U5wrxAVv~6eC*rRXG?$ej+{Q+x^88hMuq8OSYAwBxuAjx%c zYXF+^PB74=Qx(;_ar_dlYnK=iG7_R8G*&Q?P-Gyni{)7I6BI1EyE$Y=7^v;K%XtDeXVuf$BRFUu@I*{uza@nHnIrX;IGS zJ64JL_GEYWh2JAr>mH0D0C_^P`(joc*LPha^l#lkv+DEVdX%8E-OK?K zZ%LoO;VulV>tgjwC+}<1n-RhO)7E=8-R9V%MZn7M=p3@u{%6MPq*mA5U{xls_t{$F z{3))L(95U8-~I5n4Psue@B8NLw#OKM!xN;(&$sca3qv+etmrisxBJ&y?au@Jb7y*P zz){lu-kC|QL$Ck-_IV}Y*P-y{pPS3qS6uOvu)K=dqMe_Gb_siT!qcIFm%DPt92O z7MwYt>{fVgBRJ$D4}sLij3JdT3Hk3go!9_Bw={^*r4ki3ItFo2r%J12R4DZ20&lF@ z(s3b19@xIQNG0;`9DV&X`hzj6(m&yH(DI;4H>Nae)82Z`2tHoZB_aqDTz&fm_KM;2 zCqSjR#o8)^I7nIH;gcOf&D5m)a-e(t-6VxC7$xE(;U|3?`ec7poY!rI+B+F4Z4IFJ zdpVm*8JT;0j1ijDT_vvNUP@9`?IkJ-QruJSPA2leT{@J;w`>Sl 
ztxGD_u}gQxJSn_KTBI`--VXkq1-Z}m1n1pPJs72}$wja~U!>o9L1lGMk*`r2glp$0 zF43-$6BvJOG4K+_z?clAhd~=Ddn&GxTw+QO02^t0YEnY62;;bZ=EVZPBK}YPmQWAO zS{M-s=72lb zrGG&~d#^XZ?K_bY?~~yXNxu~u9r32Hx~VCQuyC}LT90^fqAFxaWwVcSDs6H$kGfUJ zQ%pC>I`r!I;Dywx7sYA!NvmZT3&r5^R%pjMcI-`tOUfsc*o|x! zrsuG0|K=km7*=&ymlg~7V%uDUMSQ7xHEvyAwZ`oZ<@V4fGT{oi}#3iVpF7&h@{?HcX}b)TM@DNLjJ zTY+X>ISp#@W;cF4MtH(S?J)~U5fguYheO|EWdcuU8|yRb#=Z=s4-IKdcmsL^v%JXs z0rK33Bj56FYqUvBJDOo~?D%k*|H@^gC+7!E)7*`={e&t(pSM)X-0g+UfW zYYEGdbu23<{ZeelUZ%ncY;KZMdAP3kj7{3bGHs}K)e^lAxw*our;Myr{~s0@@#)vK z52N%-8Szw33}traV{MP-?3%RuV(`XpCaGe%^kE&=DC|NU$`S^P7}e#nK$RU|F7OPm zll02BtUb3#{<+}+C{~@@Y0QP@ETvy@p4=wPeWaW>RYqBc<5F~jODEUWK`dnz|@>qq7+K@ zv#|ydVTIvt%zwPW5nm-8XdX3!-KI;)`?%3lD)(Xdh&5OJ&he(DR;XW zmPuE+GQJwLfYDU?%mhs*JVhc|p8!Vh zO6;U|lxUg{Zzz=Lg$5VNoGJ(q&+{CVN88IatCU^&&3j%qN1m{bz| z1F9xFe4?FjV8uZ&l96UZ0hHD$FlXP>plKtcu&GcSOg0+2s2O5nv44f2>L%~6Oms5@ zYGIJ3ed;;1JYSa>-oCV`){ zvG(8hmO7!fXn(JPk;lUL0>|_PNzsTm$*`gt8~G9xH+k*c7o>pZL&Ki$EhQ+RVFyLg zpJ7F>vXjSWz|NGU2;<$^`;>oE$4b|A?%xo1es&k(_N^VNG`T9N9oe>%WFNLsSaTYY`{^;K5#`~8F=60 zv-QzqALm-}n*#V~dgC~XfvZIeCkL*P1^f^!RDLugq{ZE#{|?TlQG81P0W}m?ZI{vt zoY{K?w*xQ0dmQ(;KIxvS*K_W^Lo|B4XdU7%%!fNn-?A+bTF)}_J0E6Nc< zc>tke_3tubgJmX~ zXCjvy6jf~NegORn^O|usg2k%k`%v5 z#P^6M)GjI#aO3B(mKlGD#fX>K!!^&vOos~KX`X`?{UNB|l&&i_r+>91vj6IKQu z&=yjyHMj#1d%m-@<@*Te6K3jj1#%@c#qSckQ+AG#SQ1VaBm{fsw3`nsd!V$==Ohu% zxd2WDxm1NwhHr2ivm%f1Wkyj$u|OIE4u&_~Xa9X?89Y)e(wKdy6w?C5n`PVN8=J$4 z1;*lpBmn0VLCXp*F8)_b0X}hXE}zUSLso`_@vv;jN=Yg-CU@miPsCIkTO&zpO@6># zIgXr*B@2x(%xaSVxP)~&3nd*~d8Jla?^f7h zZ+;#TbE4_+1|~9vjI45kq)K&doab7Wr`lyiSOPeQ>F*ST=yRa9p#nLq90_(aD4S6} zw)B_l!CdZl}bS$ zpy$|Dt6;U^PuoFtDDe3x^!Q5YTw#!~BYNCR?t(e5P?aL2XBzPMZBA}OTE|6rz31r1 zS%}7WkP-W%`0D1g5)7?A>V|_#pcTF2T3|6aNDAZ$`1Tcrh=y^>yNL_w38uA9h1N1d z8zm7FX-(Tp5R>5obB+E|06mfhUK@d@HQ}QFEg4)qBX&1q(i2+2xI9_VI@No)%s;Cy zcdWO`uVmJBKpYg`koV|!Vjpy#oe8rIJ=Eks!fDMDK$z!)x)sD^6{O(wfXelE*jP=8N}SWsgvB!f?rj}`W;P)Vi1peEiErD)q-r16F}hff+sAdH(*yOPKs 
z456J>lwOdPxohgO2zgg4&DlF`d}cHBgo$b}QNvTUrO^0h1Z7vzNu8Ga16I(*5%X8k zL~tcLWb65mx$=)v5H>P0FS?C72yZ5Y!nw#{0a8Lg>N7Xm8h#2;ZtV2|?P*t-*vTJ- z#`^H% ziYQ|HgC1jt9zjV!`~}4W#s0#&?^;g$z#2eQU{3VLzCcAoU=iD{p|aBj!{jp8i{!b+ z=NLX0n>}97c+J^D7ms4=D}~1*;Kb0iBZI|Yr9Hz);~%n5XKQwa+=njt31u;0DK*ng zfT6Pk{N|DY5HvejlN)RUxL7FNdh*hs7(JDEU`s=+JI?EPRK44%WTD&JJ-qQ_e_Ts# z-=#p{bE$g3|7rC{%ll{?B~b{d_J3`5xaoZIK3fPQ{3e~Fx2ol5d7ixKt@}K??fX0# z{r>vwTUXpX0KCPQ>Ib~~%>LNkR|5{o-aDzTXmk_31n-Cjr#O*Z!F)-!1h|++~RO;Z~ZriLeG*BZ=&znv`U`O z@mT;~4%cnTjRAnIDFOoUI@DM=d&%k1rWsFv)LZxderodyFoyN`iJO|{t-qQw$f>HS z>b-lWG4Q!|{bA_As_yzYn8c*`M7=rGC+PYR|Ipab_x0Vq7wC1T@aXG&+Prbf)3WM* z>U4+~Xt`+Jdc8YR0kY+qtUGm{EyD&_VK#EdkX(1UBCbro5;&Wk7|C`#u*NuPq#cb3 zrRRn+?dIMwVVTqrmDCr6J>yVNTOPHH5srnGIxod(1jO&O$rVfwwTGx&Z^=y-Ga?V5jH z`!M7Q)GDMB`TGwYzcBh6v=8{7F=O|qA4EPRmufL!EG|#R^y}2DI=`KxoG>xJp%mV} zkp4`+hRx4+M2(tISa%G_ENZu!j^*| zP`QgKR8&}!8)bAyg?q?fUY=hn_!9eb!P$5?9WMU_pOT`nVI~jdQ~rRC(o;-%ox;dlYwh z2fcj)0|FxqdwIJWw}9Q=QGwCkzSi4|f4VYxPY7V(=Qp7LX8+~Z;cnZ$r}Lk5zP%B9 zxB0sI+I#!6Tj+_SV4S-7`V-ja^z&bU!uI${R5DN?AWbkJApHMLb~dp!GqV4`xi32v zDThTtEPRIbth5fT1nPQCN5k?YPWuhzq5iXQ5G&l`Fw~@jp#xJKhWWK{XA>?W7)Vul z^TgfYp&--%pde01A*h;2!USAs6pfJ@Eo!EPKO7IoegNM**2V^X29UlLjI_y0vr%Rs zpjl}05k^~TP5U!NCgM1-Y&df*5$-05L6^xT#XnXe6a^%k^(I+i`3vWDV@tFxpicy` zRg}o^!%{eD=jV#g@dZd1QR@U0LH7dpp7d=6M8~YFQ_x)YHyA(S>uw455_QBTv1o0U ze!3`&1ca!8Lv?mU^;Qaz4|o#LxuT`^cPXrF zgO~EtMe$N6LfYK3O!{`QC*@g?r%O75Y(;1XL2UAWy@LjnI7rF6Deh68`cO}o0t<7! 
z25>Y?*zvl~Fjbhm`8A*mz^V)|fl;qbz*zewH5+>gNH_%rzZy#hzsY_5ym-d6U>;(_ zcRbr5jIKSgM#J$8huOZ?M2x09$4HFu3}eB}n^n@`bCv=}#fQ*b?|JK10J zgpZI8DlKh^>*WHn+aL8jc>Wl@rjX;X`;Ki76#`?X?<8Nq8UEhBP!;MnESDYzv1X(F zp$a=Ie#Ih{f$i3R`bQ4?cmfX;{0Y_#DIQdK?sz;}S}mhUf2I^blLO$3;0}*46!%HON=Q97K z*WL!+X;?E?52xwKT^Bv&ZZzIuex6pa0X}+)9;~2N=TupDlYBCsw?)liHyVJbwm_4w zb?ZSo$RS&dLV;3>Qi>{E0M+m#2io^vaNFh`cmVuAi(&%C|HqU*KC#*N03Zr@;g^JBy5bn157@!+X6>CJ2fpHyNj?{2=@ z^v90tFc>othSkjTqT?QIEv6QhD%tKNs(E2IFMbdCyvD+!zija&$qpLwpk;SCEf{;r6R!usMVeRxU zUF)=Lx)T=eHG2cesoA30R*GZuQVZoP@f!826&{;+aqzRvLO3X^Mx2ZCvp?R`b3EtG zf3P9Pn~jlN*FhePMorZPed=`?H`Hm0eaCIWrd2E43PYwhzF2r=Az^=gY*8o1UJkHR zzllpp*l5sfYPHju7&s1N5kS|lPYAKCB~G9GA?69u6u^T?bNqQMGpvBzg{0NNGpGS`uraZk6mSamgjHwa%49!lKk>RaDyK z9yHtGSXdLtFN8*By1VRmkU##JRAguEcTmq)U3=eDtdVLhKC8%vDgH&JQ6X@o(5@0p z4y%_nRXU9sL~%D+n1|6)9K>$6<0i9TI3LcqgOt+NH|`_^1IK!3(cG8fb@M~$RKaGf zeO#GDP6nQykS(l5CB+CJib^_mkYIp1zznUxSAre_XT!r>_}y05#po!6@YZBz*I!Nc zsoiD(D|*?do&hw_6)gBg6$B3A)qqiVy6}oQ-KwV-P66!RD5lI;)!cSH` z?mtat11$vGCbw{=J`}v%5O|EqNM2EKE86fYK^`OcAH^58Os~#EObolO0X(`Q^)`?a zvg&Z4LF!P;QN+}z-j?q9&rtlZe<}q}fiVY$N9*xYZ1xhXx5;Wmwx-V*LVN>laj_NI zl%#ZcSl|kS4T<0txiz}xwtEb|dQ1-a`fn^ABLExGbz*m?`0zGxUI7T~p*1ti_EDm} zjs2yvy2T&rYhkv|kp0|}KbI>Y4J*)7+XP3$Wos9N5vm9NtZTIp5JUQ0rVdvpWWTw} zJ@DJGRxX9qQ52qSlb6cYLVo#ViB(WGKwhD4RSAbmg2^sC+%0!MpA&SJ5LA}v`_ygX zT(XVpYiQs-T}4O1?53)Fg5yb_MSx|Q=1H_;qQ4B*)*dNF`dz9*=_O4ie_=rr=406u zt<^tk)xZDEM)ACllzI+X@{^?f>-8dn)~<)Ha9;!p20tRk-VwgCpHpBLc9X&@$B;v8o0!4@3vMpCGL;*L%}7 zeLll#bvfq9WokuhP_D1naHoaZqEZO&gl{zc~_P;xO8hM#*r%R$yMD z1^Za{ST@Ir;shqZ*`%>o2X%SdZl+igKEmjQG5cj=cgTvqDHIY|!}j7%+AVKt*u6WT ze_4UDc(!v+PoXln$m8x0DI<@^DR~0N=wV+H{2)dzF9+1!mB38-usEc^w89gPL{+mz z(~_&T7nrLHkk({uP|kYFS=eZWw*o=M}(v|)l<-2T2WH;8%~gpYQmZNGgIdd-2Y<` z=e?gT`ftIofc7p{8_7H5JY&bS$bV4roI25*T2WTnS;#tr2uUcdt(S4a)~&A$m@8!z zak2|}u3u5xChcLAXlmV||@ zvf*c{RQGb83764yd?3#pSR}NB-DDL@{#Y-}A-#*Sm_7C9T@3v;?S=cXiWmJ#uPyfoi{f)YjA;imqm9OEl%&Pz4JSkYJF1}0qJ6iecCx}H?cXxL 
z;4NL8s3_hMeNvWvPcRyy>OQ_GO?1SUh>`>>zRcx$`s@M^T9AN$x=$g?9M8y9mhOk= zh#_eClF>#w1cuE^Ki(9lM1%olB0?W#g#rh!GgoO-;6Om>3JAE{J9J69ttx46FK5sb@M}*a z3Xahs1?^4?#V#@*3csWcUnU>Vf~{b=N4lw|!I5MI@gZCg+Pr~z5QHAZ1d7q2QvB>v z{bP z;8&u8gGTsfRY<0UvHxBIEDCR*hT>m=%&{fo@a@54~9{H zDCadT9w2PjOE`-*aDlCY_~zYso^RXgD*(tSgMh)}#d=&{Jc zMz7^NqZZ=P8U7Hu&nGVam;=AK8Sl9*%&C^ec@Lq_xS99uBg2MS`O%r@h{ID`fGcOm z>R$X_Ypdg#J7dN++cssR-^NAE;>?!YX!-%*<2%|S%I)|8hy)Ou&Y{oQa;2aOaINF} znqu+2#AP`Ad5rBvy?$yBki|*Y&&+)raPV_KTWOv|T<|0Ld0lzAjvjNlUDXxH;QQV> zdwZJ0jG5pt_^2%o5%Q*JpHKUGX{GnS6Ke5zte#nmFSUN9Av`U#}Dq*U>M%U)@16t!cgQ?>4i3 zX4Y5Rt3A&-KVBy?9R8f{w@05GI~#Ysu&Dwc&RO^wEU9>tKQ)81!73NY30@MtZz|1s zZWFyDX*}NG&KvU_M5TCs_{;}Ybb?6;CD)h4B^KFyLywaQCUHMxr3|tLk;@lsz64z= z#+$c&UBqZ2ww7dlbcHD`>L&E0f?%wt}n@W@`sdI{!lJMfeEr z9TkLHey5xs{8yYwZ;6FYY)@C#QL&E15x;mzm+-OGZ9^GyEwavAE({9YS*W+l% zuV=lhm}8_1(-v3T3kPjd;r-frN@_7D!}iK2o}9~*rf~S3nr?COx%(ot{D}qLZzpyIr&!JarARy@0@W!hzk@56#U2q2nUS4 z1z>Y1j39o(LYKLD-h7XI*I?+9cbd7d% zlxlH_`HolSst)f~c?V+a`MtBj_2kx#z;;8bU}8mE=j_{Tum6v8(@%*`i}KC$x6fSlUpiDKa% ztqS!57JZk2O1!f&g1B%|CELY(vu1B}*I08CY-IfL^pkGOZJI^>bPa|R8W#MoukE0* zH}mo>#VHtx=$vy^R~+j@*hcbKv@UT_>*?tunql#(Vl%T2*2b=DHpdF5wxUpW*&9uG z(KR}6)Mj57b%&J`K73Yq^}9ItlnT9Fo^*#17&8U~==RfL<^1+bNl-w-DIH1LsAXj% zmrgAKeCm{f+J)Rf-N`b!c}4Mr?1^j7)@_Pgl%_ydtwtJn)6qHHU_kziEbE11l*Xm1 z!8TpUUkBF?)#r)g@|vXgnAxLv1)&==ASd=~VO7=jsHEb6MFV zT!QN1zN%~VV;V7s4%NFTz1X+2vxWMhStL6>f_L~pMd0fo&w>yd%X*(4^(l^j_Ss4{?&zJR zPua_^#kq=WwJbWmSnX}tmrfD!36bIn*Jd6AT6s@12ZYD%X6r(16!LR@L$G$l4zJL} znKSEl&1+a+y_)0bpmt&m3^$@%Y$UV>HjpCV%UDmN7VWTF&>;oGcAiLp=!54pY&Cfq zO(p!1YnYEW{9@rI4@4^m)tbKT9e$QhG~v-(rm`7G0mA70*aEa@HZzHMz-b6ZA+a0g z8WCLfuJZV-S9YHugQSSUou#YXNj)@&uwJss(P27YSjbhQf9FfG!y(!~;?e&S&99@JWWCIk)k#BCXgkAICVzZbHj zAP2xD5~(H`G$GZI={GX})-`VHxHBNK^pNQagl-*5$Q&6H8!$NU%PZ}@m5VpSDLk1y z!Z!Y^KIvJ1q~;TXCo)e7iJ34nSpl*>wt!!TTzWt_bd_V8M{O#Ek;L^Xi0x9-#hFJ6cgo<>fT30p zT-+{EL6g!E$?y)SIAO+v>%zy&pjNwoI@mU1R?cu584vu2d!#xLYM^4Q*COce~aw%(gRDczR$aRslbu z!%Vza0jr)aDmIwa#_c9_pHn)wv7~U1cOmo96 
zbKa%E!3M{aIJUuQj?IGd`9Ojk+w)>s;^6*R)5_KXjZjrY?uX4vd7zk@gKFIqiC~%uf1Payuzt5PF+X4K{5_ zOZSDMutkm#8bcwJ5V{v5wyj|>N%lqIv}J7m4h}y--n`%SOGUFwoxIv}K_F!H1ro6# z2Xga}5bp58w!i7C(U2Q8$nyuUwYa*y#}9t7=w|<>EC3>a;_HUT8-8-d8uZ7QFZr22 z4L=>@As{lV&@ud{&_3$e_MS{R_Rsv7Y453-c;4(m5k<;j_q?Q>d1?|ZKGw)V2O14T z*s%M)z}W~|x!BY@p$YV2T#PACmFKVA)b@W&)nC?mbMk`oZ8aDynmWkM3Q}oLZ({Rf z*j82bW9V8K8Z_7I%FZtW%_k>IoHu0C%VWM8Z5{+$>IQ? z@P}0QaIkm}PG*Z>XV5Wq@$H*pC-9#4V3*21oS517&wa~7$gZ3R60^~To>Ix!MtL89 z@h%x6W+?jMT!V_LFN2adU$*#9Lu-L&SQQFDwNl~s&z|v6sLm}B-`;(5aZXVaGxe{y>nOJxO}POLn!R+v*J7Mr(v-w;GH(gu17)zOlku|gY5)50)Z|0{h_;59m<Y*<=sGh;=988Cfg?rbw7~LmagM8WGMiD2O zBpR5s4E2(Y1rPq1G@~ZvYnz%4CUK>YGwC5vm~#JCCaW%(ZS60|Y&WLyva7t;hSBd- zKA_SXf`)Eh0RAmQN`F+|hGMG2>R&un_G+%?J8C2aWEo@*s=`Z_ z3V%g391T;A`dNYX%R%t%o<70O=!PviTeFN!u>hminmf`IMxZdmN!UWDeuo%F@H87u zT7(oE=?^wRe9n5N%!q|k*^e#AmLz|g3WnXf`ip`Jc1SJQ zO<+CZ%#EfJI3Ad2!ifWr@6m6f*=&Gq+W{^ftEu$m2l{#;wgnB@uc*&?br_YGtLVtY z!->wu!(sW8gkGGSnnn^!9Cp7K<2vHUAGuGiRoQ!&1Ebw(cvRzVc?HrWuYH$Hy;?&K zM~1%+wh{>9juIPjBkUL8r+7IszI|?rLP0kqp~6fqP@ zt8Aol3@gP<~W3u zN})@rF$qcAv?~k^8(#`MjOUUs#@A+{pn5?tmmtu@!6MxxoJc$fIRtAt#qq%fsk)Bv z>?#^qEUiFq9{EY4hq4Rsi27W$i&bO3+Pg}n=%8fq&XT2Gg!5XMQh3n;DrJfuemDT< zKzd<*kOASVC)7JU(>;aiQogY>sgyh)GWQcigS-ah{Ce~tlsPZG7eApGj&4YB+C@;9y77oA->Z2RdKnOR#rbwjhX+$+&D|T_ z-(G~;Rpk?Lb1*9fi;>`s0MOwt2(^+Lk;~fESGTB9PLBCiR)4?A3|z<1l=k@($o@y~ zqLUzCCzfcrxN8$A=q%DeyJl~4k^OBL2s6;1xNfkPr?!dD7CVSI^z|&9SFQ zE#Y>333$^SM|Nd@Rda1RpX}jL%qgWj0Kt^DN#x0zaj@B7@B_`1B7ZMvzgBeu@_r-^ zKd12bG`16od8X3+Fr%*%MPjZ&{@9_8AxI`Kgx6s}hAXD508jt{9#U()-;@+$lvB#6 z(`Z~f3VKq$FgYm0>!&^GFn-9sI;EG74|>}+^SkUS{0K_vYFL?0{X|T0k2ezCM@x+0 z@!<^PB>=&`Xrk8FA5papntE$5s&l;2<7K?}H;)05ht;RN+9L%*hHvc1jWS{Kt7he0 zC$izbOZ@BurvL8O<7yHHORiVPdZ9GL#$y zYsIvgrMkDK5yY!yNHOrT3flY$beEOow2TmNY^OH*2^@hfhGV<~`@A2VWDf0s%o<4~ z$%X5nru_j@YV{njmLy*UU*HqzxvLK`GCRXE>~trYn)jDpwMA|AbHy_oQ8I-r3Xs_t z7WO2=L*lhJl1p3T>kXhp^k0Z^BXr#CwQ+I~qZsjys1|Xd78Ou7p>X+oF)gAz?G|!pZ+HwJCfWFJ z9zN>#j1D9bGk|SISX&7HW-^n+?dP(fwjGzDJpPTU&EgM{0fYqZiCA9{mwAZwa*mwu 
zx%F{|@<8JAh09eCj{5n73?PWCb%*TpgWY=iXmQ$OM;IF_fQ}7zWGchHb@e=lG(sFav*u@C!gE;aHD}fbJuc7hPSOsHQLT2~@`y zx$q)T#qU`L+^%i8NBH0Y%Ke>!vE^v5Fc57%L#g{F_W#u{q0he#!nBR?cjP^QpSMA~ zX)S9}=z^@rn+}DXjukx?@&};cOq)C%26_e$lcEhshWkXOD!$~G5SutL7L_1isJfmuY;_Mx`p%|lkG zuX9>*Y0GtKYCn-Y0K9&BS^5&P`V*VWbTFzc;}v4X8pVmO$(QS($U(IcF=PPEH~?+` zfVx`PzE_9#2JS;E<~+Yy)pZcoG9W}P)}6Y3nC&(-P9m3n6w&B63|Ha=P#t$v_i!Ap zL4*IrM4qayxup{9ftj0xnxnx9SkI10cpKLZj+t{&FQw8Kh0-UkQX%nGTWi79XM@5c zIYXdE>W*BIDO=h+HM(!|g0OdW9ueVU-*@HwxN@E0OayI4J^Y%aB*$izBAiGF@xOS` zFkB|l^p@LZ|NRn!D=sqY*2lshz=oIYf8s*86hDwtTUZ-$Eqeeb{pnDCr>MI-i69%f z)VmW3?eO}^_V3Wr%|)~WY|V%jo3F(N#ReaS(;-uFQGl-7?k`d1m40Qszs+wI%~Inj zb6}A+_hWzIQN6uy7u&nM(FgShfqjv6*6Pzvx(n=!w-o23F&Pn-B9Fsph6no& zf{0am$HRHh@wIaI1A#Y)hqs^f!N1CU2-(?{=QF)~=K}*4}c8L1?uBZz*kSF<6n4<;15X0sQ;M{ zd`%GoMT2@u1{x1Qyw_;kX8~EVs7MZgQ}ipii>EqBt}Z_#3Ee|o|9-L(_r-FJe+kk3 zftwoJ5_tW5F{MwNF&{cH6$++aKIvo8!&pwKqIW0ki~O;wj3X3I$7iNwj_F`&cz~^Y zE1VVh6?c1&iG64G-X;3gKruGKs`$(V^mz666{7rUTG<+W@8ON&`~=po@s)aBXoSpC z_LwH2sekQ`>a$bFxB9O3A65_gP0TvI>n9UOx(9(r5g$IXW&B5LRNGF3yFg*(7aFyfn-WEeHy5_OIP;aKTuhs#yj~64*0bW=vX8!f2{?NY2i}qKTX%RLG z4PGg><=FGTuNQ=Gp8ZQx3elVu#wPj43GGq4n$K_~)EYj`D=X9#3oSH=ffPCloQu*+ zBZ1RgjJz}M78UG?1*Q|C+hclW!xDISF%=A4twenicmxT|TDEP&v_fK>^zRd(#4Srh zsP8?WgVAd*;B1L~r~KV}rrzSO%R=1mnMZlus&)MM_fJEE>;eN`Yhv!x590w+<(9aM zShEb|N;8A2uQH4=8+K#~7@J$~BeW(zqx%$sG$`LgfJs`nb~PdP0&TN|4?=YQwMx*X zF1-gaJ|?YQ*qK=iGE98C{b$)R0=)+17XzzJ8#P0t7Bh&E!S5%HHj%z|8MFW}VU0#( zkyF8*jlYQYP=_~gOAsLV1|Ch_wr+*A;@3$g5;=w@~4 zx>o1aAo2qjrWKsGDwNk={JvF_VLmlt$I{h)3&aJ5rk|0kTv{CP3wX^QxP@c++Ft9` z;CH71ZvFPYv5(H)1n~%F-@QC(zzf@?bA~H2HDE^2R7n+?9&*5A3He(4b(3_!0j3op zyBfmc<<}de;M%gl3MNLMfi6-&-g2#*K2@J9{%d>bvkRg6?o1mbv_Rc$FLL@5Oe>bI zKXgTel5aOL4w1K%aaBSz*!s$ovOB%{o?V#Ee-&=O+BDI(b;1IjMmCz!rBQfe)Wh4z|>B6vUTP;_|iMy?_> z<_QH-O4CRAz%*pR1@Q}j5A53o{v~1nJ;WSTcCk_Nn+z}KmQ&#y+`<{$(q^KMas?|bW7iIq=rvr0aag2HhvaN-%3dm35scf~Sv8o^RoTjWj5@lbt z(%;M(L0}@eFhKxR7LZ+$)N0w8O8#K-G&#bC(Y_>)?De}jIig%(XX7F{UEM?Ax}C6X 
zkj$q|^Ng{vlq`b!-yUGey*QB!Idi$dp1T}h5YNXLX0L{Ma)xC$t9xcn9E0&+ihO4s zNuaz&ImS5x((wohkmJghVg)^P6Qp{c0oymAk7C8q8!q# zIjU306Mt;Su(rC)O=2IaIfNa!?z}-jYE8z;hc3`;fpc2#PUDg~c43PKJ<2%iYs&}s z%)m*`(xaU8%!t&shGYJY5f#>-Kqa&A6>f_|EoGz~WRwDr6S_4#%;xoM@Dc@U1LXy> z2jc5Xp5Wbur}hB_8+lp<+ZB202`mAEZ}v*wV1;b@>ntNklj>(f-Kw;X?uN{5dKxXqoIMqN)>aRT7|(`A#6^9UB5m{%Jf=38=b)9R5pMZTP9w znVcy4dK!35t=?JfdsK<9ygFn;G*7{7-=fR<222f}qT*aYYq&B0X6TqCby^jDHlXqq zQ|V10MPYD9i<%$an^|C3^@r;qDyTvT3499?%Q~x?NhRK(P;d6=IotSe4fK zn8k6*9~&Vt%;#zEe7A5~egpC4{r=7KG^+TO{`f^aT`JTYdaj^Y1Mkni*mg+o=rH#( zO$}skÄOHTVITh)vs^gV6Jp{2?;E2B-oe&UucuQ&ZrpvZMOWvFpq6`-)0PGGNO zA?4|F(W+=rtQR9b}r>LS2U0z;(|hej;GHl-O6aKdLtFW z<0`;bKlSDL&-j|3hxb<4)YJ-CGpE}5;#fu0<*XxZs`BbY?ERSXc{m6$h-jv=o3!`R ze_gR%@|~9IO^>$5SR&NoG5|K zbl3SA2t^|+IdD|3oI*7VxV&$+*%22xZ7^D253dYzr}HVpzQoU<1}x1TEZox z^VR`a=b@{j*u>+mD?RdWQ4o=`eeP}CuYnH|##XK;&f8>}9Bku91S{Zob}yoOe4G#5 zg{DEbu>31guu7ll0a)xu3gyVxWYwzxoI0OA^QtkXas3Co3uJe+Z<E==ioMf zWcBCkGhcfKxeYvijJ5C$as*mV5BU!Un5H0(FEHBH$~jdr>p(ethm{ z8$H?Vd@*<1R(=_529$L#ao3UCn>{QKUn}^f#eEj6m-2LK>^#fiGc(sN=1v#JPgg(r zuk_rez3o-(_W9!;%jP;AzfDXwGB>%X=gOWdTioftds^f{{n#(oj_kgq>o%*eT;{pH zj#yaT96xTax!=Bkw)*{1o!b+=D@X$-^V>MzsRGP#*Xk{YK1@bn)m(C&VY8o1{AK}9NJ3j7S{)zB@4DDE>>KBNSyVTpIZdLSKZ;8thT)Mzr&BVlGE)ZpQojX99w}; z=s#>5%X#rHnC_!5am>b~QsuE3U$D=<_&zt)<#S_~m1Jl7(Up3D)x@oBfF#O)u=GHe zb%Cf$KOX&zm3%L3Ppt@!mqN}~fl3x1djckgWFhsinh9P{d(&23?};sh+I;^SJA|Sy zv<;#&wgb(iOO?Rv_$ljU(e%la@m>1(+jUp+oSH9FpS3>QqDxnm6_T8+=rO-oEXNc#)J?EnqE(1^1hwUUxyOok<`!df`l zP#R4m)mqF{HkV^FIo7bbz@)AwDQo&ve>>srNerLxsoscrOYc}VYy}A`%_KJFms^=nmfepUsM2sJs-aDgy00IbO1+@~1`RxslnMpEDi z0r-T7eHr*gxJMzR8DU0HNrAZ;*ayh8;G-OaMifdDNOIn>87QR))jYZx?gx~$z;29R3-}iXME`s7~4_@0;`%lp#)7pgl&+ z0E60qeH(bpXtq5No?qzRz8ksBh`Af{%#ce~Yi~MNNr!S8;edL+n#hAj_ru%y~`Kyjz85MUKc)opQk-b&9Lb`hZkV( zXzD$x7xL~v>pkul%5O~Wp=yJQXoJp60*|rION#eVNrQk5O>3gaJ577{R}2m@_~d?r z$PKyJn7l*NmU7C`jYE}H^5Jog16_OTmY7W3Zer*J(XkPSa?3d81I9Y->BQj?P`jK= zV(4Dy8(vQ={@zL(EbD>G8oaY$JUNs|;Y=CyMT7xHu5U}uJKz7AK$vKwlBPfg0BHMH 
zw2SfoOdvS@cmBXy9a;-nwIg>N#6iZ$X}~WpPZa13!5;unKseM?7?1Fvl2`CA7(@_A zVz^oJM6*@#w&msa#^}?FzU8(>_q9c9%gcp|W^JeURm;owmc{kt#>S_ij2S zb2D4-?zq=!*6mmC?Pu<-mno3*qXslNrMIoJwXtF`&P5@4VR`{MK9nSGPTeLIc9%n! zs>|osK8QCt%GHzWKCQS5xS)<~-xY}<`aJzdU^anaE6N$$o6voBzFn@^$>4mRW(r}$ z=I;3oyd)w8c_!yKAjRHo*sIuBLz|53A41GyJ0(Uqw*i4FOdvV141y(Wx?tcOz0`gi zG2kD+*-OaJgQFWevp$UT=hs3+XnWprq77rPxn+emB9wRxiZ+iVFtp@v|1g3DT(aQ7 zeEJtK;%Okl8W0}ugnF_g_>5vbVyu)f#X_P6vNK>w8)uQMQX-TT>HaxCF2c}gqh$9# zSE(dyh%`Gk+lP|n2)>a644IF_J0aciHKAPu&W!xY7sRg~q6Wsm1C{h3L03qJ&V%=jG= zy;QA-o)jHxF~{kqVBMHhIUk@MaN7+z3k7Pg;6b-{{J;PfVzdY0yV?q~K&UKM@_x+h~B&NUr6+JblrhHZ&b#sczFULutuxNy2 z^vQ3C`MWSNQ%zH35ikr&Xd>gNM9Lo}3Zok%L%YY1D^-z#lQ4?b2cm0dNlzJ}fMbp~ zg~QO1qK1o-$|T`52_eFU>r>HeM5ar>81M=?sp|&^%4=j)<7^i+VI$=toxcR3AyT7| zgtth=yGBSwFW4Sb#TZI)I8uvnqw@S>cjPuIG_!zZo*}*Oqe(D3MlqT>unRNI5-V`? zkauXf)rZKpWKqmu3Mt?-eDta zx_NlU&iCXpl&-x#o+vrv!rUmA`9+g0AQt=53gN59U?5E&3V#?0Mh+S@4Gr0Eo#{PJ ztPz`lF;kfyS71!24cmz~iyo-*(PfM>O^6>TA<=PSMk;ccHZ_z7DGa5zxqx$_sp*ZD zgq`;C4Ldw>6r5rOsY8Qk^lO+6h1XOg0K$+AAh3-s8&C}{9jI@h!)xc{#|LlDghw^b zSU9$~bXr(ggK1_rj~UCju3JQEd0IliiURBd3Rms>gAu-@){pObF&cu-y5DV`=q0{8 zo^*2WT(3(gD6cbbfgD~C-s zAj!Wh;@_COvo`CrKpHaclC+^~C<&1zoe0rG{!|9wEp}vQVW(^m^R5fLDvnXDOAJi4 z+C1jO=eBI=KvZJGIX64Mw=^)Op;sr+;=o#`5Xm*evGrx;j& z#kx{d(l0Rf7NWAJnRCcCH3$2`739#|SW%f1ttc;pyawD_Cf-n@@1?jhhlgl0D(EDB zjtK>{lL~1jkp9_^U%+Crn3JmW%!BSJpeBOrp#A*IgX~3Vq`{De)#PTG)H&9;BIO0% zFYfCT4-G>p2cS;Zaj+o$IITeGa7f|QC)UjQ$D7n7N&rbTND-${`l$I;;It;>XM0h- z>VetE1I+ydy(X092;P)HG~?O+IjWAl3#8G~2|!WP$=Pvu*En(mvf}y(7s1NPyUu|3 z3|I;y`w?|EJM1^y!Yd%K*|U=bT7rV)fKdIW`Gy0cz2Bv(Co$4Ah?dG(&BW)j=KAEUNe2^yRxTLS8V3p}>ry1}aInN=fa)lPDuC;k_Y7I? 
z4I!2+06claGo38*8^g%q%_r(eob$S(=)t^oRO0?UNOKQphcjocP!wpA<#W!6bM#K7 ze0tf6C+`RluTsSn`r??l0ja>uJVc#2NLkCJA$!{AuRZ+}6`T?nwGbybEY^RP5EeKZ zzI~TGk$)CegKczgm?7O2f+Ge1=H>O+E3 z9hwp9K1A}MK>)X+z$0KgRveXZ8^kmVz~;skK(acN27qESJxeBrbG}rXW#HaPN&}(d z_#G>mCZsFb1khG6Rtnlq#heVjQ-A=m>JHCEZF><-bousU-o(@)bG^qzvdQ zYGt8JA;EvEl4H2TOqh{gvlG{L4ySw>x4hBbVtKX3ky)g3cAn88A3J=#d`=V2gdj^p zPE}W&S=(Qgj=Z2JpbVS&^m0tyaA_kESdl6owYc8^J)JC*2|SA!Wrb=CR$$8i&p)`@ zz3RZ-4V;Rw1$Nou+Y0<tb2BTdvVkgkEDA^do<++fuXB_M%ZiS;CIzXaEig z^2`{gDr_OY<50bg4zx0-y=$R8G&=n zH0oeEmvNPyI8ZPDN#Rer zY=Y?c3*rIg`cHxhV|Uq%yjm?+=SK0@haq0R%mmJOp-c=LbWEo~FuSgR8Vl44hBz&A) zE5Yv=tZKRVs_$8C5zz9)S&c1Tk_FAOZ(I~|1*^>uuo9PBTXUkubgxy7#1NSDkAsf) z>p1ahGpe+G9rl=x?VoesLkbt%XqzSSfYgWPyM;QLOIXeJG%oLB;X7a!^$xB7&pmk%griqFk=m+2FW#s@+^QCD-u1-clA zfr_I9yeOB4By%N~4irs{+M#1iP8@6_r|Aq?O8l9{wbel?sE3X!Qc=&Gr9(D>HL^#d z+NA8#3?GkA_6DuW_+(v8!*)3Zmk946mq&0V!R z|KysmMAfudR*T^SH@11;zlCmGT46HIx`V!bx`D!pdg z`B3q6a=xc+b!_7lbb4xu3O7hKeA&@^5B2uwV%e@gytK<+kd1ELU1peUZ)n}_ev({! zEqLuTD=$B{w-0-{1@L3g;Dpn@4PPhKF=ct~k!63Ij4zL|`7XxIwc=m;eyP8{Lgv$V z$6~ejZzkN|leaxnJ!qL_+7^Aw?gks@KV6<@I^0|)Ke=K|X|g^q$~yS8FDM^OUfCcd z`Fa{?n)cu2W7aR}a?X9uywg4QrH+~0g!W>b(bm!TG*U zj=?W-`1HB#Usxj7c`2W&_jX^7de|;!eLh;ZT3SnXm>2EpTYr0dKW_vd6%ElKOl`bB zvMD-QtvDYYww^M5BQsN?-N}!q+Mb>!ecP4n_xe&~KYS~+@Bi*UU@G=jZm9Z>3Sra1 z%}%7r^=^6)Ti@{r4U{s4yku#5?tgJjhNQaROg|?5j0ThKP(bf|-@K_h(vkUCjgNEB zquQfaOT2vr%iGc<=y>?pUS@nKjarh!kafJew--l z>bl5CXwSD$iRhwT4WY)$FlU*z>TrHN9dbPK*<%FK>_d}Pz=_7Xb-JcfcX)~YmgP51 z$Mb!7B(h#dw(KhAI2?q3I+Cva{+ZqFbv)z$;@gqh-b(s83c%bRgx75G{r#G+#txw^ z@=@vTBu;BR-P)Q?h3|QD^E$(8Jv?)&(lvi;ZSx-+ zHSW(>PYj%Ht^NMkua-RddS|8J{aPKL7izg7k;HewA+O1p#xd%>_BxsIe&thTdnsf~ z;9{MvXYMpt(~?yrb+*2lEEu;L)nSPRldvdZ+~e3 zZQ_RQ^0AWHq0{&LoaniOS21-cI}M!)a_6@| zqo9{p1ybKR?641FeCKOEv~8HFcdw*&3w>|H=+5e%@|ILA#WQj{&MZ(+8!>Zv|3V{S zNSX?ry&Bt^%vDJe*LPz}eI5NAqE!x_*2(?s9ST%e9p$Xml>(}Qm4F0FF?kz-*o$`y z3LL;y&vJHC9rc{VC`@DD+$G#yzZ6tqAmkVK+O25dHt^R&7B^g-ShoGld$2A<_yhee(B1IX z{m%M;+u_%J-%+#dk0GISKmCn5P!pZ`+tZ2HGg}FPHwQ?j5iLPS3ntRbnjxb> 
zMFx=!Fy?FDK_K}+@5bErwJK~q56S=E_1|crRZ3M<002W`{~y4|$oPLnR!(Z}smRMM zvd+q`aQW~`9YE8>C`y5XFes!EfbDpk+es5k8{}|$WZ6okMCZgtW__1_2YyUCHA3NL zk*i$p-Lt0}w*Lagd;ycsH@&v)J7jl6UAw%c3)wFOmMqRQ<=!tp*Y2utxo$?b#0TH2 zXK(q2p&QTbe~CR9kixxiEk726Pn5=On+&2$cFk5IeJYLTeB`<*S1g&5A0irdb1GIM zgDV&m%acDOf1FCY1m%@3jd6V|BeP8wYn$y2ZEEDGs?U}rc*DLEy1e``s;`(Of?IM4 zf2fwPWjYnCMZyc*dX{vnn>MO{(mk4$t}Ao3=Q%aa|Jr>s1zSwUBC5A*^wmaum21R#!(e2WR_z$`ZU)Es3oZxVX7jDkepxK#8PO)5l12gu6#mbSXac zk67ANiWk{aa(iYwEy-ZA%=l@S0j1Y`mT5dA!kP$^L3V|T9!W}4O{@8+T(;&+Of@9% zM~Nj{XOM_|SE;T&znqq#(Wo3$hd_DnweOj$V)zvtq-*(3;MFusMkib5~P1DS8ERbzyZZ7NOzW%Czf;3 z>yV>lo(zOtN^&WKP^G}QQJGnm#2rE=Ik$k?%=s=^auD}iD7UabGIyQbNy^mN1_ht- zS+}+M`)tvATBEa&E>pc$!cL(AeRubb#m3d*p zE|a>WQ9g{-H0I)6Q7TGv8hk>p*j`7vtk~ySL&JC79@3y_Gh1WPJdXe+otA4cP<{3q z;A30iJ0ivlQ{3_FEI+idQ%LeNSZ)l8Z5qKHuVG$RPX~DAEwg(7-s@z zvOCAB*{BMs_@pj43(}(hST#ozKY#pTGk+S^d1pavS7;+FWq2Nj$is^SF?fu;9@EoX zxLN2igtaV9wrT(3inH0YEl-w!=R6fuCn~&wAsZNtUN5W&Pg+o8z7c;9f}5_wEfDIC z12w3Sg^l2aK^UA5o(jXnLiE(sW*&Zv(}#2}Wyf-{G2Z!%KZ&Thn6Fz;)ZZ$PK9L@(JsAn>-mywX?M-fAUK6IC5=Qbjeebi3D;{T+rxON&z5W19 z@5`?e+*|s}QwcrC5|mHm+Ut(#j$6CMb1bd=jI>XDl4=u@%5j(l7>!NJ=QqJ^a8bO= z+&r3}JI^u|h;@k#Mbp$$6GENekBV%UJDH&377d4Blbz#4)|QmsrEC{ zA>30_Kk%OwKn_jhx_@z$>7_M|20e@_J?~A0CQ9{8vdbtH!Zc#OP|^-B3S6@4gnGQ5Q`^?4sjV@yW;N zxE6bHtR$ouRC;iI4|1gx9E=g-&9KO#l@h;bziqg>PFULW3ef6Sy7J_?L-w`&r41N? 
zOwZ*Fm}b^Q4bry28<(MLqB<${AP3~Xpo8ILHEDjH;DYRvt$yQZ<3j+*Z$_p|Ujpuu z6+Qn_sJ37*0o^2eooD9Y)az_dM02*q69EkaH&LQKNcGxC>=Mu~H?!m%fRg^FzNYW2 zVLPNEWPh;4n>$Yf7M$1|fLO_8|Nh?OzxuHr{B^SFlgkzEdl8j3kTQ8F8k=rv2{-6;ER0$y`- z_B;*uW)cd+@emCHjlTt5_c>^kuR&mF%LRbYfV(F;y9t~EI}_K%WW`;|NgPkPdh z>#0=zkomj2j(x}FIKU{(DL)el!tH<$gage;wSKi%yJkfe@zoYsvrohSy8DOd0H7J# zfr20buoIvCb?f}4W|tbw{<;k1i`=X z&p5!$HfGN(;i9|M7-D^A^0R{xPrG~%j*wVf^A#oQVMEGxpaxi>*;S6tz~Iz3rq*je z;ValwV4P>&G?e56+#2wx_SGx+l|a`9co;as3ot8RS^X!_3j*%JcfY_UseMwp7(#yn z4*;#oSK(6XbXRzddGuupi;da@&nSQPS%>Ljf~OC1-JzZj0Mac!y&`5b0f)@q5pNh5 zvl*qmoL+!N(iK;d_?i%b78dNsdX}Wm26G;)=5>v% zD&_BehK+?mqd{>WsaV)~DT6twfso^@0h#f1Vb<7Dffm~F*vqkZ5YC-IEKh!yGH?da zCqoT;mKV8@SWUTK2==PM=ft)~9YeADf+%({{iOcU{^%SspcjxTVKy=?p8$u$p}Mpa z_)+ekun|14)aP@+ep!nv#Ch}@Qs5V2Z6qwN^9Mzza};e4lvKRMwL(lEs1lCkomf84 zIkdv_zc$R)SYh_@z$K+<(3}c7)`&_+{86MzZb)e`W&OOOSzyuv@(Clui^AtF6odAG z*&V5}oS|7;MU(im>F;0qt!KyFvDSAz*mN?SP>+Y|KAf()F}Zj%n_rOqxlZqjSe^#V z=oB$0Qz?_O=+j@tgEli%9yD50&UC!%B$(6dsbY>d&E8*6&HZ4cAyIWQ5tbHSkvw^k zlObj#!ib3{X=&7;S5iyH*z{aDGenI4THf7>Hg6FIhzy+I!f-KIy`(S-}jaeVM*WjUxH;rp; z#XA=RPe8yT{f-$r4bld)3S+T3k5@!7y7)m+$6dkLDYSCn)|k0{LBv6)+%5Tj9tu^4 zFhw$m-FNb@?7_5ntLwcm{6$dxgZ+}vDC4Bb+Hbu=gQgxB)si}?t?I~;p_>YuNF;$h z>fnKKlO~))$?X7Qx z)Arek1%sgwR8z999-#7T0GtDIMKH@PuBy^=A{5zoS^|3V9)>=#lt*cNj5 zhp|!z(-2rnoUXvo`RxbO4nj|2purYt^qu(h*J&1=6ywhv{=DCOKTh5#dMqCqV?o0x zH_e2SR!|^oL$o?tcrt9MEWfP#E$g?1#)tP#KcMe*qd$EuAiM!(to-3c$I)g@2BWoh zc#kyNMK@;ifUEJ+F`T7gNtGto^!&4zbwLYg$(Xne_A?u~7AN>77?z(;-xY?+fv6iv zqvwv&L^qzYCCd73o6swz9e$(m=#4q5L`Mr=1$;2Ncdx4D!{Pd8ly0)m1GQ{tX|2*r zFsw}gb#YuydkOjN@9L=7(Vl(gh2LY4Jxt^ye*?$luHVn?3ta#01cBO%8kRO~Ub)Tq z!z=%b4&6GcX%MfsSw|NH($TweoYt_|c>tyY=AmK34%W}8dpO#Raa^} zg$_T~KesycKBcadXN*_8ocBM;1{>gG58TXt7t@NT&+rrx)X3d7A1_e5VWdN=hQ!6= zWN}Q8!f1*l4Q*-$WP2~5Wn3nr+#j*WIHQgtQWb zqJ{C4pc8Vk-oGZr@-GPyJFa6{w_|7C(Dx55BRd)joB~$S^uIegUznlMK^8qrz$0WA zB=p;wxBN)KkDU-y9zCIL`KQVt;37AlcGTCt=yj=x{Y>O{t3x^#*7#;uuYjFWq2ven z#P6XoDaX2baJ~eWbul_&W}iSTdiRGx*5DPYcc=)fYoV32?dm?bPpc$67OW+5(28Zj 
z6iY}nOShBaK0MJ_1%^n)^O>9n$5Cd{;RoVVXo9xjSIWj^vg2~O6Ga~oBOhvp`00d% zhiyfg_flU*U9bRL0DCOL-*Hn#tMQF=;R&d8^YF4)P|uCjy8N8fPbO19Br9no(2`ud zXmxD-ZzRq$tUc@fu2i(W;>EDBr=QX-(_vf>o@p5e;Hk_n_3Yvzma5CmnSh@7`b?2(AB>Z>8n5bKCv|o2_*y@v(cc2k zcF4GIC60alm0nc?y%9WvzZhJTzeV-6^bnKyo0K|Ohu64oKA#GPzE?TCfs4G(FJv&= z4kn^3U)c(eeCL~ja_^-Xdi#~IYc4>WENX-Kq2r=QwgjtsBiD%0-|dm$_34^JO#o89 zfF-@77hkysus_yUIKTf!lT7}G)O<+K+_WgA`ZBreCHMlDvb^gW()=`!{BWMJqR>W- z1AcY~yx<=JRutju^%imk0U2=vMs*%Rhu;H-%gg>ffUj@Iy;hsSf6Sppmi|8BKiVv( zjC#r!^cO>V!l%H)NBn1N0@EI(IFGXW@!;r-MeXyw1%mN`)yqo*?*`y|clPV21Kff9 zxQrNm)&%Op1NzbgW9>1<@nS0Tp&HY70eg}~ee%5&?45`97FIQI9YRTw^yr%8ArOE6&zH9 zI4v*)k3#I$Pe(m})pD}Q?oTHQCMseX#DpC^3Z*9mmM~@m2ndC-l98=kNfjgLL@bHY zy3prum$-%k-q)-9&PnF$_sz49BkPF1WFyI=V8nAEN~bQpqrOY`Glz?Iiu!R+lP=_Cr%v#(#OAk;MI{ik@lAF>#=ZAyhN;5#=#5b$fzD&zRVO zlpy|aJ3Q;ku|X5mvJruUW{S}94y*--@l-wy2YQzErD5|7gqk;=)jqsQ+OA=f-7;yQ ziUxgX3CeM

I43^tR9E#-Mc(*4)K2$ST@^P4oH+En;0OD+sS`9vIziFF>p94-T(F znllNb&eG}VHd%gG0|VvKi0$M zBzF`3VpNJ#to%by*K-x1Ib0|vL|XJnv}@t>(+_KM(C*q*Qp0G;`&33a8^;#eUs)U}rMz^6?LMsAS`j?(ph?Z!cNd_;T>$={MSGK&o z^QKV5A6DI3SGN8kfG=!q9`v4lAH1G%25@y=8Yx?oI+TbU(9GrV(mp;M=-F#qMdhvH z%CkVvtccHh<$=F+wYo4}PBNJ+GMSR>);{Vwz5+~->Q07dj){Xx1+!)u?CpH9~ zMS&jmosx#VmBb~TPic52)|9@zvd)q~cCMCfox18*ozFH-MaXu`;er z%-TeA*UM)wJqb3oGOjQyiBKFh^q+E=`29@1p%dKZW(mHo5$=jqJRko zJP22e<17D+?V#QLxJ_+q%ygbW(g#5s#30O&8ZUMYev7&^v=pMEG;8KhDO1!39$ar< zU@I3rp(>zw9`|@B1f$%CP3?KysRw@r#-S~8d8XQo(hX$zDdm5P25|fzas}TNby)GU zX?{ga9UI<@DhcxE=FFZ-3pLodphae}I@i^mnA8DY4d?b*=sdw7PNN`Bl|Y*L{HN{9 zR5mAbz*fuyj1TXC=1Vzp!dx0kLGv^Vn?{^8S~ghDU@+!YI-yQ4uP@40YG!~4U$O$> zinOy;({}Y&&DNX=l1}&8gE{h#FGL*p5W#bXI)w|6O4VH_5HPg-S((QcHH>Hl79~T* z)R+U9!lXzJ>;tR^Jn8xHe=&_VcdVft*@^A}Ap2?>DT4h{R_O!P;aQ z1C21;-xZB&)yEf6eq12`Vc4;AeH2$dXVu+!~ujwOv^98qq(2N`a zb>A4-_8sck5e=OrfMR@uM}Y_+%Yde1>AawIIZk3+YvQB>gsJmeQO3pT!nrUkcV9{8 z?JyVY995lLoM_dr6+yZ6x!2pxO(*2-2H+fYZ$deuYIw&C@@ZdLJBaLr_6tLTAzM(To{(pp zwJiG^ok$XK_zQZ8UtE1_zV7xAozIqoEn)Dt;{Jq361G0F3T6j?d?YKm^ zail47DGK!;Zz;@%-RJ^Iea_@0ZBskXJZ)C^rWOK?XrgFEDA00M18+!$GJTtPL59}9 z`9?PJ6Y9IEg5>o$AV%dt9?5mLKriqtFYwMU1hxD`8r%zWc;{wS;155F$D>#ehPWIk zVdj(twIJ10b2(;avQAA8b|9&P84_OWu67&X^~ zW;o`l7l5-%#llX{RP5)Ofg9Fxil&E!tRP!>+fHn_8Y3N4+oPZ=z$Ky|Vb&vpR z$ie`$++&J>9tmHv~#3|OAQ zA|WWQlz9C?O{mb9ex59BhoT(gWLVBg^1xm)bU`e&H%fx2)aW5D$udTgEgUKUP_47b z{716xJi#hjp6-lT12Hf_yUPO|x^1JuJ(Vz}pXHE3R6dl7fqhN+?}%!=KT@w}afVz$ zNkvfL)^UWIN@nJjMdtzPyauECsyg8MivNtd<>cqA+Le`yoO zYyPR9D^Kz}8dx$(J1YLQ2J8qnv9%J048S(Z&F8q4COznMophIUh##{g8u1gyW>6Sl zz{u$4;CG)xy-Xo&m2sB4HL!}`JeTK3(9s_(6{#*x!+t*W%Jizy+HuF&7Gk@L&W%ScH z9f)evOAl8ywR%nzt*cCQ(Vlyc#uE?8F|HE}r@*5~b|Y#|Vmn~AWe)vY_C=#)gGkq0 zB=q-7ZmJ9l2_H^PXH52#nO#7|SLsZkvi7lC!BXr$uO6`W%svtlt`Ue?HLF-?O+J>j zNK<7>w8Ez!&#)jb6fQ-UZr(Darqy}81|p3nX0wv9;N(h2?sZ1)CV3P@@u|Q$TbBa1 zkqdUrXk4-GH`;Y3KO&WxHxq2XRH3_9I2xN_uO8yqC~GQV*^C4*tvR7JDk)afDN*m{ zu;jIQwG$8mf}CV1bG+Y%a2!~3u^*Qh?qxF_7)AIV)I@}%?r3CAZ 
z4&axqTLJuRxu2c4eXC7NjuLl|4_g!{_k#)!79X+Hw67qdt#y#1zK~GVZ&vWggRFK5 z)vKezi{JJ!UUGuM+1U{0kZI{9dug(SNumat#D$d4aNkh!O3((^ z>ntLfp@=0Yu(M~YbQpq$bh|GGMaH!lp@KR<{QO=aHUY5!A>ZQ3AfOH8v!#0ZeL~yoPI`m=UDfvMt^(p1y~Gq^CGcK2HrQLPw{Zjobs9!ULPU z!UCoH`Ki33SQg(HW?S-R| z45^E*urJEhrDYSHBD9aLgx4m~b3Vrg3I=0aH>B@0xY542CcuQ+5oVBxA%mK+bJ|tt znH{-`d*X}fAQaQyKm0C24N=X+ySD*Ob=X6?l*EpO8Y*(MqUI1GCjbsv9WJoS3|Duk zqWBj1M@E}=y|(%gWf6(jUd;x0)(v+wGC6Nhx-O%qN%ju8-U}>?IB>=X*Cu9JaMk~+ z^qfB5y7UmFZsXEUtV#x~t&ujePur=DN`(!0R?7QVJCJ6G3>fAGZs|4rM+l|`W_cgJ zJ_Pd#Uw`khzDIUX7Um<0u{+F=2Kd?(to;z?Gf^jCnv?sOq&NN9ZFK(%WtGKAqdcu6aaK;d{DK_IM_j z$MhS9)s+haedFul9DkSV*Gyq@ zcz>Y^4POSco!y~9j)Wt%($ihKH1Tuz={Zt{=B?pl+jVN$^i(6po4M5WZ~nW&j-2Aq zQauN*A*oIop2vmHY)(sWDr@oUzhaGbeLRbghzcIPZE#aImM9#|UBlZ%wC_(}-=}Oa zY}zgLS7{*%L(92A=v>G|QJD>;?e(+VRly0qD;DSRqS5Big;kATmX17wQX2F%UPv0Y z1};39vVC)oEeAbQ-Fpcav&Xch-)73OU!#&=mrBz{4=F^gMy{sJZQd8FkJT?X%TG6z ztb;6HZlAr+_jE|Z$sLKw2ydU}c~7!YoHCR(WHd(0y;XQkHQJar`>i~4p+e4`TfWmh zF=f7An~#ZhZAVVpSsrhv`Nu`w?Mg3_1v&4+YmDO19ov+Mr&6Z)Z*-uBk{;gm>?DrJ{>;e{uY@ z$Pk)LzMfV-i|9hDL+6s%(>KfD>)Glntwg)`zyb~3l1IK@Mw)`FW4^_mwN1Nm!`Ud; zQ)=on9B=0#zhGpx+Ag(g2LkL5{UcY&%UoIQpF`BkI;c?|u@Tq{HYx)FsoAwn=n|uM zm0w=p<9p{pm5z09rJIjz#n2%sj0^jrXIwXLFRM_phmWPJ z-}3-h)hTl}M#tl+rK{Tk=+aYCF%HxDYQm6IO`ayaU&r)9ip(}~LDSzd&XOtP#V#=( z*pMd-r#a1il{r<CHNHo#I;au7$>0?G&aRT^x&= zbFRlnoen!)ek5dp;~DOj%CtN+f`>UX-|p>)In%vur_~uOzcpQ7iPDTdf=)?ELt>4KT~@DVe5T^v1%yOB*=yXy58s*iaDFZn7s491@qmwURlJ#;n0ycJp1lls#&!tn7!Uy>UNzk19vtgB&yu# zwR`OH7I1twa5_(f+#*Q!%y0 z#3k45LrO>9II_pG8p6BD*=M%Jvrt0o#DwRoQeQIR%YCJvX|=((`^?mr+QC=HMNfN7 zxOFWaT!RjMNXhrt!_3-;FA%{-A_f^mz2pzXA?7g**UL)X)?gfitVw%jXT&irFH`QZ zj41qxqx#LBH}|B%O+9t%~t^$FgH1Jx@+ie?*lBlQ7qyfJ|;nV&JBE6Jo+&#hWY z`_{Ia&B;!#FzaeGef`50s7>kIX2&t>pxl=7`desuisH^c3E#sKh$1n=&Yc80k@ z)t)zxA;;@w6wHk(#$;Qr3H83t^qd2oq1l@b=$7%9M)7VFnIVfI>#phsQ%Gl%3ra2n z6X82&c~tsVVw#N1s4j(Ako^^9TnN#6t9JpDt8MyJEqFGn~E3yO&S5 zF$Uj_D&a_&m<8w>3eHPZGx3P*P@478-_Z0ZZp; zX+2liXoAEiP`-4CBm*cA0s3Hoe(CVAKjL7BPw`L|b$?O?z>Ehh#{#X@;jswdp$xE4 zgdmgqao3 
zHRqyPwIeawu+VV)4NpJ@S)kDPYL2^f+->`HC zu-w6K4T)HdxLozgdIo5_;%4s=TlA?|4cK7G+#CyE$1^_Si2$Dqcb3w4iT{b*6qqwd z5Ej@mrEQsmLlyZj`M)jjdk6xq#6@$Dq$D4R{GjqBm{WL*>?8B#GX;e66q$$TxHIJ< zn4?S=u@~Y~os%WOHiWar1>2Lw-j#)mn}R0H(S2mR$|7Uu94bmMIVGB%V`r3bzGYX- z6Ee@iwo5*h<>JmLc#1}s1m((tILo7SFO4|^I$Ruccv2R1H^g@1Q>>E(sLV;Ki&>g7 zSLVSs#5s%$of!=QwgR*40WbEbvxAE2pkeeukj?Qy4GmJ~UVr0q?%@9$p$ACiSMdf1 z0O*4P03iIoBJ{=<26{$zHue_Q|0`T{QH3?fI>qQ&bbHtySA38l`yGnSi8`*5z|kU` zbVy{C;^T5fF{`g;;rq0r$}3KCemR%r%E>N?Mm*EwY+;Ememcvqfpm^e^uRa=+7wTP z1%oA#)i0t?^)NExt0Sp%ai1I5xpkJE@&&o|xqa=ref{CSB|C?c}j-=@!E^N>cE)7 z%CtNPO~IX5#@Q}oL1z|nYRWD-nFO@TU7Ri)PGKpcRHPAW_P1q0rRgl>oc86{ln`gK za>=S_;UCPf6gfPh-bOLcP+cw$!D@d<)mV}$`^%xN;xfTNuZB-#Lw4&f0#aF_vly6Z zqs6*l4a{dht_52CH^Xqgg)*0qa#Iw!t<3Zvm5;t)#i2#D47+@S-C6Tb!lkeXloDcJ zw3ChQs5GrG4J5y~78%n3QyI)ptV#2C&U$$s3YVcdzhE_6v(6j?lt}s-{-X%K-cn0t zRwy{v5xPQQb*MvQF0!K>6-1KT1=)Jiv1~!R9?E4e`k_4|cdhRB;ur5c>HNoV`%3Mj z=$m(UTmUa%(jJ1`;LnqeJj3E!8$K6*rpDC)f0dZ&u?!R1gIX5SpQ^jw!UDpqdtVbg}N1kl~0? zNgbf6GeGdmCYfgzAKF1RUZ((<+6#ipIu0Queh5Kga64Sc&~E@cr*7^%rVU(l>(PG5 zCjr+YLbjP9yLw0YyiPw?5Iq_S`4%@8UV@xcn4n8?4q?1XQWrUr%GvHNQtBxYQ26}3 zG{LG^H)po zUX0)M!)(<_;Y%T&mv00>e1T8OM-j^s>&Ye4?TqiK@2xQzJoL?{lx=PC=N7ZNb3JbQ z6Y1^}vTAa_!^4n=^t<-y^5WTu{_H!>BAVx?{W%vHg|CF`>1q^Un&-26{A!va87eF& zZ?p(~CH;7rtJL+U?IZ0hEJqFfRMYS^H0CC!ws3w|&-dWcuInYI<^3SQ4IZVJvz_EO zNT`)o$GjWE`|~UYtv-gY?ptbm*6-XQ zCgnIv`+Y(#wkxas>(Xj?V##UO@8)BrSc|BXm&xaRVU0=7XTC!Lu0bxkdgpz=e6e!1 z=YBRoN>9MbZrk@fr1`F!&9OEv&P31m*XkGI`~5j7hp)o>k>y)2;MEqh)y?}>t^8%Oq@cJC@nujlo;lF#c(e|5ksuakw|&$ok&@AYEimbJEA zL~@LQO3s7*WVbg4+xEz&b=CoVtOnK%_S16+eZuZB{BZ5*hucp6FE%;7d4Z`X7BZ7t zUQegc+xu=z+zn46`{PP8lao^z=@jAGi`NhK^XWc`VCxhn=o=97{ZF@3vuAPPhFO_V zl8W2jvr+DTlg9R6h{{=}F58nBPXQ`=_}`RC2hPl`{PS@ zWr;U((;g{w=B}`kd1iq^z@OluwvNo!qE?E;^HcM3l*q*0x0<(4MQ-ia( zzn#M)LpldVhKkBV$!&69f8FJ$!o`Ll@5|mO92B#WN{6f7EbiOhH144fs?-r!>WO(t zcxiZvPKGcX*cR`9rSyMb|2w;1bN-w`{hQrYkpG{2j)}3I(f<_SxTyMBW2xczow~)| zW^;0UW|PWf-e`vXq-VWhcV1`KzYNP73T_TrE*8oFjlSgML|t5A&UsnS(s?6 
zR==PU>eZfRrmC_4s}R+)4<@Rr@Q4m;O{mGFG-Jx7EI%do*HV{Zn!xB=oT$dDH5=|M zz~s7m>cP+ztH|7>0nPpOScC4iQjRlsaG53_6$Mn`! z$X@rBSBoo$9aWI!smVw^FLF?!0;%v=oLGhtp|YqLU~>_nvFNC9uT(y!!B8o_Xg0+h zVW=%c%RE-qQ9)%|m7lDLJ?FWAExIxHxHvZN*-+-l)S6vYc3qkK*Faps8f$q`uexZ? z8c8*xL1_`P$=Z*2Eg7{@cJl1VCF6*`z4*&CUOuFPuyaX7Yz{`&x zK12rDslxr&FmXU!+JGQ&K#{N_@=-ZSU*hlVOoJf)Kfl}Sp;FG#R&23?C>k;Igy0Gc zQjj&hMDE~AlUyn;`&9gF2}r_%OTL6?pq3~^RQX!&BS4s*X;T_osnsOZqQp-_Z1`$8N=i}^cr5IE&>~3oH!$6 zORcoH#z~~lHu8fT{)uOn{}mQ5vkbk}?n?pZ7o^jCmOEs0Fi1kX!EUH?s|}py>22yC z+5G822|~lbPixvUWVHMM6Xrp%K$8H~y<5}*0NSIZSu5-Ca2WJ#lcE5G7pYEQ0tX2E zV&P=*2tmxV;9jgV<~a+A^vRNiGkHoH6tO{EZp|V0CU|=YS7IUIgWYf&*}vPG1F|Ra zF%^jOkRn;oL8CH2gCU#70s3WZ(6)qsUN%lRy&o9IrK9v$PF6ntQpq!HA4}sfYZ2Ix zq4pKh!vz*jDiC82W0SY^iwdQ>;3G8*q@#?#-A+Hnh|-f*ouvQ;PTAwI0s$)w*;`3K zY?=$U&DiHq5w~3wuH)!-^xI$N3(+$GT;~7aa)ZQgol+sfK862yt>O#LL=Xd>bi7Is zfQY=riw9DRjI}e5Q}Yk=Ag^!2wCbaj#*02qnmcAmabm<EcAQJTEspQ+MimZo9x>*(vq{)9zlMxW0X7 zvK+%rj^_PbxaqBpj)YX6oHO;*cD>I@DfwM~Z;<_d3^%wBY$AJK4b4*adU}10M;*t| z5ao5bKfK>g-G1rZ47b z(0={I*zfns<*f4>W_N|M^?H4*RQL8!jc)G#-fE*dh%JOU9wHkhp)M(`P+QpR+wq8`b>UEzu=#x^%)axN7IO94+;B|yx_+dk23Hd&B0Nw)$FqNB$c8 zlK9d3BKpGSn^b!Tg5T97e|pCfTKEP0?*z5*Y0(EK0RR}|1ppxWzfQjFD5j?&Vfb&*(+o9TSa%9oE@6vacOIj5KyY7NXm3Cq!?wZdbNgr zZAiiH@$r`A^S43?otfvy6SPdTGq8rt=|kD{fZ0iR0+JCUir)N82Vo3ZF828ZCw*xg z%?!W%+hz4Yz>q}E@+3#UCeuw&7z&tCvBxkiXYbvz z9;uKmo4l`rE5vlX=C#A-=cbEkoBHX4ms$my@#ph#84RP)gk`wAZMuWVU&40Ub}IDB zA`OI|D-RuuXW0}OM37;>Y9~|24h3tN@+j>}biIb5br>6@ZF8Hr=ruX#f%%~AM%($K zW-xQvdc7PSyE%iz7({WsoOz&H3d$CPP!K`npxBTPvPr7gHJJHd>0p5>HrZ{I5WWSiq~JBlFo zyo|b5U!*CGozhz?d4W}d?W(Dl{a`3lDLz+2wMpZG0aDbvh&vLFFpNsPm&z(6l5w2^ zH5b(S@Tu5IJFKy47_@EEchyxMt?JbWHz8L2qkK^)6&UYXJ>bRX8aDDq7 z^=TW#X|DFIND_JBG8BZDGE(w50m`^5q^i^);1t#3Vbkyds9D~CkcpMD&hH=1C1w<% z_3|0=r-K=wgAm~`pV5zjPxNN8 z@=9yr(XUBgep9XnwboXjVl;TxgfRDkUqv2TohO98Yy2gGA(VPaqzs1@#XPj)O->@_VuXDT$cw>7?8EGh^n!atCCA@ ze-m;H;s@~T%<<;&Pb##hR}2v%f)ntC^l_H)&kAF3cO6F!hmmEpYNodwro2)+i69JV z+RHyU+UIlp4cm#S?*1REjPOsKPUM2LFio88T^=_QtEqA4JnG499Pd0q 
zuur;w<*Amd%x?(aRB4KC-!8pc5fy&cnh7budSy}0bKH=aU>F9+YdV5%bg0%j#Hds^ zlTvo+Qk@An)#1!`z}Z0BK1JTxB8WFXU|XX-erM*~T&IryfEDuhy}?h-X!iX+0s&fZ z|fv ztTPF*#Pk**)9tV`Jg^px&^!i2V;Fw5O|y$cE7^z!X<)Q6 zIahVUx`dHt?4;BLWQ7h5;>QPVb(91tywrri40a+eC5E~|=V5hi5=>L4Z*>%Sxe0=u zuFxP@Hn^oh+y)rW8DX0HV?%mU1Np=rA}%m)b-*rAHH?CVr=AB7h0t$A2R>k?Nfd+9 z_`3Ua`g)UyyE0~L8|(@NhnUkhTyRS&cJSeh16Pysi=qAgfX z7+ntSb8>oT!5#js9bE~F#^SDy8m=BHbw*l#caN5vNVp&SJIV?ogSup|4CTzt-X1@1 zo>mz#M8{u~5l|FrO#!TG&%a1Q-u04BtrI}bjqkvN%hu;VN)u-j%&>yBa6Q!8(9r*- z%2(#cHD>p7n);h~V}F^uRaBnooTerHnD)xu0*Q^q5@C^9ATx#*)#qvrkcmYWvxyYt zZp}o_nuUaQmd9wIq`qdPwU^Y7sFDW1xBDjr_@k_#*vF02ufcO2w77a!7UtzDjbK zW*6?}@RS!lPFE-BiCH{O(W&HCa&9GeYVdVSpm81Ex>3CfMUA!4LVZ6&&A*uJjlO71 zTv??-!twy2fI0|b2k@C!V5p^eiR;o_b$&>E3I)KLr9I*YW?TmUF62`)?}ueR8H5k) zoExCen3x{;lZ(`R5o2{)O?tK z&+F$rTVT{2o5z(wK0Fo@le7!+fbur;SW0q-hD40F>g$p#`KUPb{*Bcp0S>!B7}hqgYX)b0v01keMa<^{7m!zolrlPmW5*~5ZCK~< zi8;7l$z|?qjChx6#|`p3Hv@pg4X- zAedoo=o}M2?WW7W#{X2G47lzepwvic$K`hNxJKrB@yp6B8ez1Z5+@o%JU{&svtHGc z)jL6PQLMF(%D+m&>^o($?$339E`K|&F~^S0f%M3oYsU$C#pU$Vuw`6?d&{ee#Y0;> z8ypStM(|W&1Vp&US732I=%O6>c%N_nG-alWc;s-GGmQNKTr--x@gL0%Tw7e@6fX$% ztq_A|`;vv)$MwQ|W+#Cv)tk*VJzzTp$KD= z(cdBn8N7+Yt~=ytDBS!6{Kh8s%x|r{YA;4rk}Xvxri2IuSmvj!jq1)SL$!g@bQUQi zY*-J;A?o7@oty#o#7b@y0vukRjgRt57WfC^K^P%^jUkRBo(FW}Fwhn3Uz>6BHamow zeKiig_mmMRf~?Ru@;5dMVeHY%8g+hN@lZQWNdjSmed!p8AD94rE;`egM<1y>J?V^M zb%{G5@bGW=z8P4eB#Jd5o?$|py$^=bn6jWxHdt~#xBkIHoKafg7i64DzAT_BFUAQe z>6Cg?wGt|Sr|1${LT4$3+x+x7DGTE-+;cK@LVQzKUxEQ$0IS4zVB=J2a2*!1ykA((uGn7-vmKdIT9PnId)>?rNNq8HM|Vj1t#I1n=K&CO2n=p%4hJk0wnv#5Y^e40Z`Fr%JZa

=>}4`LY7CC3KVHt#D5`4hLDbcw~ViVH6~2w*vN`Vy7~uPkTDUr z2Q8mpnt~m)eVdP4-W^;ToY6ORfNZa06mz1@&Sf9eF`la_n34xK*`5 zflA$xlZEfP;OB4%S4+d|u!t48c4KK9y8(vsF{Yn%@f}7bnx#Lg=#4v|Pi>h)v`w1t zY1apK^}*q5e-!tqE^aXrv?d&=co2h7>5mcjR_9K}!Y?MmjF)kaLNyte8-^s4VIh}` zt4erzL*gk~nT(ZL6-j64?E*#G%T$vAnhxd%eTc%|m69N{HtZg>Bv`YS?q-^+-3>t&58UCIgrw78_L!;@773>$M8u>wvqm@hgxZ!z7zyI<4#*ndvbcsDNkGk$3 z;-;(Th!Oc(|1x68x)ie`kl?NFpcalSz8WMnq_C)b+f9va4umw_xn(Y7`r@FEBUw`O zK~7JOfF`B-M@@ZJWEFfq7w^5)JhQ1uHx?*-D4`rmQ>rW{LX-h55m#dGy@e zQ>gK#h2kuRg?URG@JA6fqdx9H&0!Z3YWV|!Y8UN$F;->=1h8A-2V6y0_tlS7MVDsy z?MkttHd)`J8~7YL`U9oDi!ZPX^9b_|CJ=zXb>XK~VBLOfHvE~4dK*>c4y|#YJG2wX z5bCV3?kiSuzuB%REAj>R*_p{us9U_&D_Q^BQr<&U-}=4QyAkVCQF}`r_g#sCswSe` z6;-Alzt&ke92*h)yo&O9CJX&PuWncA%IKB{@e|~o_lXo;I|TFTtIx=Z1LUl+vf7K{Vn8NXlXER;R3vR{UHqZlPKR-|1}fyPOQj) z{GLtO@@v#+JM-fAhyOxq($J@?-uD5hY1oab- z-Z%%~D~{(SPXCuUvx1XmV-+Q+8r9|$KBYSuNOK|9M#j5~ z%WQghk@hopQ||}3C-LhbDScT;@Jm&AdTFR_{&4*6u2o*k{bCmkJXS07OchyO3ub4h z(5v8Ew4>}>`6G?-{=cp{A2o1wl+~9!1GKgvuC@m9kGiTFlUh@U1XO{dSi(drf|nX` zJ0M8~WUXihOl0H(zVXRFMQes}k)kMEti*`Wlo8~Vl$5N7hScgi?ObknM7y6q-n#}f z*C>m+e^nuu|U_as`HC(z?jL!H>jVGqD?!S)?uKbIKdYB z`xhh&sN&08`!Kwq!5U>WEv`(HRX@gX+ZUkXa1`O;`#;vBJ|$ShH1mlV|3MK%b>0%>tQCOI3gU@1e!Qf&_ADZ2%pl}huID)O?@^Rkrr zqWC$LLuecaE+RytU!~_6`n)0Fiz(&XPOB(3D-5jF|Tqoz|5UmBD>Vc3SXiqD|Tus znz`^?edYEQ!R|5p*ap{~J0Ty%^0En4pF2S-x8ia8sm^(z)5UoM$F*F*f|2_N(gJkN zlO98DgHWJXSf{mkfQWB;G|8+$)__@%k=v!MhqmX-1-O!*5dvI?H0R`!0tD7jg61bQ z=qGoBu0!1R4th~NGe`>fxwjS0JNHWUMI+*0NY(k5_rN_F6ijW~pZN9AGXFWNgk*_# zi4UF}3+~cC)I)Iw^=5~-qGbzmP8Rckq68l&h`(|TbyI=0ql2}pCFHxY86l}Ej8KGYe;aM{_?_|op z4#-+Qwlz(S$|37YkbRZJO}T_+%)u{GU3J|#!Rh0sSmfHNdhLqSOla=u{#B9heRse< zbf48o{l4eef*DvTp$Q~aijoYjN#;)jmpB9EnM9;1W_8?;duX%3p*iFfOPCub5LPQJj@LKnl13 zx^_kwM>{8b@2WtyDq?=?zBD%uV%)dAiog8wnNsINHZW3EXj)&;M8 zH;MAq0Q`mIpLTzixDi@WYQktl8w4uSRhS><|3-zM3NEEq;I9UX{077FRDv8LJ15$} zPeyJ7*4Rn}9H+ktY3Ycn3@ZyJpjgXE3H*KI0QDDMfFw|C73GO+Z}CfK)hQl|iKS@p zlYv|sCY4fu5=ww|O}`LhFG!#9l?BpZguW3S+*Z{eH;0Z5kQFG;f<}pbd9C$L>Q=BxOc2_2 
z_a}aVaeYrfX*@8AIFN6AjLb%O7HE1wRAVQrb#ri%9EpqLw``I@jePz<4}xq|yMP)6 zYl2|ILBUIab=on4mk`#nHK`nGo~4&7qE2NQua=K${={YeALTi!yAwj z=MBz|2UrwtD5#&bKtldYx4Dnaa#TP~n~U~B^HW|q6lS1g+JFTe~F(*wlKx%h71);#S!HAA@J!Md~1P9hL_)*+kSO zB8QD<{0#m3F;PX-2IL4+Is`U6XM_Hp3n&`e0ycba>j2VQ_yB&LDd_rD?1Xi9_S`Bn zA_4?;t1N3$5DguWo>`EN``77zs!^l3_FC#R5V0+(3d1%k9_zGpJfddi1?ktT0%Ka+ zD(s+TwqRQ~0-JQ1kw73JSJV&38 zaIAQW#6Y|C$}srRa{4a{w)1hT+5)0m&#qMyY)L54K-k&(Smb!Npw2n`4<-c&=QK8O@cm zO1)%;fht*~)^J)$Z;J zjb;ui=d@Cb%Nt;mas3q=@vR;=auTM?tc&t^Q#Pvih%V`lRC1MWv`!pQAH7a#+)0N4%$$#2Vl6MX@A9-!?{ zj4y|eTgDqNKxEfAtWlgcG zIS9_sLnY5<`bjJAHu4H^QQn`B;cJ(VFrt(rUn1o2;dp^J47oVAyEwEKx4zK!(b!GS z=YK!reUX06FM|ta@dR$=jWt7m-b2Lf6|-;_XX<$Is1AF=E;B>K>`lDra1#75&(Am|oS45JwP9*$-T3FWDkS#YuPnj} zOl=dMiG_E{@Jo!}iRFvCrUm?j=|`!irI!2zD6|*hjEU6kfYYiqTxk~Un9r?K=n+>LKCuu7e z%Yz>C7wyO?;PuL#SK9_}M(zHq1^uhl8!+aNIL;1+5rNvKTCc?gdrIYX{UYWM_9W`{ zxMIu?US_85$`#AMr%77Ur*LyRHLf$t7EiYdyw03C0kKY!R$6^4(0}H8h4^JKaC3r0 zYg|P=>Dyx~t9zw(tCahkJ;IdCcJnHDcVqV*YQjVpHk6q>uv$u$e!|D6VP+|MYcKA9 z4L`Y#Q>1s|-}jSum9IT6YAZ#3gZ8}|{?P+JIA+2J ziT+R}ExuuzACYBb-`o}_;F-C6#60^i&KgPof|UBcW=WRnI)~U^eXF#s6LslkzeAZwvw3-+M<<4ZRyaK5C zSY5|rP{iPZ(BvvG74P<#6fkjO-)k@U_P1wR!Si{izr3HOJdc8R=k|xN%w9F>XM2@O z&!~7d*2*|f=H^XhbN9J8E6cpO^x^{NbvtAZoR_uwCU@?a#UmKz_32w zw4COfG}YoRg5%3n!_jWDUD`rO`C5v~5P4qum3 z<1E!n{aoRY%iD(f==*)no3&jT+HLdlzD0laJLf7{EUUj|v%9bO_~~$=Yn8p!csW_Q zY=?HUxv*(CYROr-{K*x%mKPAoEkEkS2m*qp>w?PjrLhutnIfSj;nV%{zV13o zT*-D^oE<7#YO?^RD= z=c64d|ESiYB+Tc4b-i!cU zq-)EiCGD|FO(5d%UhR@qmD-xQ4I4=i`i_j;9M#Ixw&s0xQC(Zi!61U}z1?vruv4xC z#CwwZgWX&<@8&j&t)ouLU;;U-c-#J=1xPj^!spNA@ z)3xgQc}MD`ZYZT@86_FXrRfx>>jg9%lO*ef#G8;3+oBEu0Q6no3$+^Kv z(4}{X*biz0FEc0hfu;;cmmX51yydawyS&w6lmx$_dn{Rx3tZ|vrv2{rIBgL}>sI;X zgqPdE)9pPKREa!Ly1v8x$E~rg_-Bv?l#Nf%6!$yGLGUmzm%vW73(LX&tI{-MKdXDD zOMS`g>a76YtjeQ-V#)Ierx~s3hvzfepTh8p^t{^bhFYoQVN@)@?k9atCPZM z)RE_@G}HM)Z;8@Q(eNi2nC^q`^?tiVj}4zDGgreC{#?y$#>%YIHJ8u0aRX!)Jlamx ztYz1<>mT4!lXJR(;hS?lr{NWI4&idr?iy-!>WYzNF?%s(b6vGEa|&%u%2&md*UY5B 
z71rFS#`W6t??%?#_zLn^-8OTzJhNFOEt?mXk-Z}Y$H39Oh;7W>eR*ZV-3@=tf9elpn z`8=w9y=?|}c<67+l)IC~r{97cz?csI^x8*ETI!bJJ`x5j!m#->6?%c3e=jY2TWgf= z3(;8mpvy0WA?HU1W5E3_^UQn5#guCZR`*SQ3%?Ym+XrR!I+yHk~$^#A!9pp6snt%@zfe(f;$b6w}&(csl=p+Lst$qqpAM!BiIUD z=`|$_5B)Q0z>qC=yKl^&7j9sz zZ_cRJn>4*^+xYn@wiXEVWfLGv!c1sv4gz`m>C3oE&&#m5G+vvOH*I-q7}F}^77=xg z{i-+o8U||^X&&yUZ>QzI%s0OOzO?oPW6zV})cgthxn3@vmXVH>wFly&i3jI4Cb?HZYl=EdKpvGnNJ42mSKl2K8S61OSw zIMw#>wI_*r#CZ#fBxJJTlFxoDXUp0$n2*jFEC2=}$a6cTmrJ2QKbEaYIIT>ewNN4nVR4Pfr}1c zxuKigqm)9 zIqZ11!=HD)|LXcj#2g4{M?l{oesIzb8NE?=N8Ijov_tatPK61dBhc?jWy0w!Br_3_ zEhIMLU55ne&&8{V!$Fd2j&v4sVDNb)=#WHrh=O5Ag~r4gBcsWX$Apo@48@@`q#4CY zGek?0;KyMYqozpl65_@UA5h0he@V5*SsRj$5_OD`*QI?4FULF^BCANL#$Jsn0&00V zBxm9qkFY5SPRBtV6SPUy##tP5wn$Zuc-sXZ#Cg|a^!6~nLhXaeXx}hHQB z5k~EMe&vcBw)_D8%Te(Q)7Nss0|1z#0|1cze>f@+V`uCCqEtTW@XjdDIeF_DS3ka4 zFw%@tKm^1>to*5UfHb?aBMtWvCk(B;x;vi?y}Pfk-;eJ< zIR>4dV3`;thHXr1^~FMRa`Z5GhJQ#EYRZWaX{s#8g&24+VoS&vBmP!!RKeoOj4Gq3 zJ=>EO#-W~OIC&Cgh+K#hqQ;UiqeKi(Y3Z&yR?zV1CQBPXThE(wVn`KMkxY;&7Fqa8 z%a~e6H2E-B%LqRuYE+&_=hBQ{#+E1*MX-aT;T&ERSZgk}M~IKWVMjPanKe0&EX7j% z1x_%Qbs1Wk7pEcf^8zUGg; zG^$$7U1qr(&Yh0wlNB$wq~@}k5hwtdYrZ1C;CSCf2E`gI-Szc68Qc`Fc4MuV4 zU#4&^dncg8G%k+ft@KQtrU;L=$f%HX5uCXYWn}5-dOKI1rJ>XHZ@(_1gp-Qo1wxn_ zi8{QLpp6}88Ba_Z2vA)kQplMnvsH$6tQh~I(Q45?T`Zz#U-P1CRWyDoBYOGL%*^7Y z)ioo`T2+3p%~<%YGGn@D{qpd+7_qc9uT3dQMT0FP>ZtKtJ3)?LFEWc-WggzrpqY_! 
zN=~?c0h57lN0L1$j}0TlzJm`Z8tfvq;1PclhPr18SNWZ=#Q}9PSjuy z!zH|c079Efg-oK#FeVyHGD82gX=iM0LN|V8KgfEs3cLAN6UmSm35mm~LG8wjX+%LQ zqfyzI>@=LPzCEH*nNgNK*&tFs6g>ndIEkMo!TpdK`}2F0V?atwI5gRD1S2dP(S%`a zy^T@SFwS=d3q4?*J?=;%fyY1_1o@beONnsOUd%_Bkgf-cI@k#r9bD`$cFOu97$xWm zD500AN2=B&kCwxZ#(Ry^eqOdp;F&gYGG}E7qgf0-he`i%E*U{a(pnVp-#*|7V&SvK zn##AES+%_HI3a0oY1UY61)a=IC zU`B*k5~TX~ScFjaG$7fups7FRxQ4P!)D`%&_N%bv4DuYc5L74yNb=ArsK%1%x7xJS z`t#nIjhyml@&^x374cCEj|ecz#|X23%oBvTcQ?9W^O^*)Re-HQT7HkW=li zv*q;|q=Q8ognLTK@eD<$#*Fggl4qq5My00p)$vc_5LVxXWQycK!4+; zO}C`@hqPUueF?~PQ8OQR?g9`oR)9#yfLVeu;V?#JLTUDbCexyzl1t#x@WAB@paViQ z`uo5IR-D}l_<{$aDdafwzZJAqoR>KW2v4;;fSjbj+o}qlvYg5cDl^Ve?o|bVPoZFV ztH3m2@;~|4PLnvB2Ig4WwrpxybOy`v=gho`pD_=Wp2Cu{wF)uY)ha>6MHqSy@sVj@o1jg&z zz+~DeJVu#@p3SJmZCatLDhX=AxiOEa2|DvHxW;elQCaJQWK7^{>)gRRE6S#MjU_NB?tSjoQ&aXB7Y z?+^mgkB;M-c&;NN9bWr*@%ID=9Xf$IPM;AA1e^fNb34?~HaJmrlJm>!^3!DE;@bE_WfkNz8f71on9VFPop{=Ycp)R+&@KFs2q;mD%6%j*M$ z*CR0LYW-iass?EmJu+())d3{Hp|TMUE|P?D$VR@6;om3FvnD;OZHyRC*Hc<+{e?Qd z23*Z`n`hv~MSI)NPNCEK0%ph*{98g`8`dwac_74`|Io={&s(snSKHd>o#Imw`x_>3 zNXnr2X8}PUW)eVJS#uTnT+A77pV!4z2e3XII1Z~lSnoAg_CJCP`_$!+h-&CQL=QnA zl0*nBdNEq_#1HU|=5YaF0MDacD zj&0kvZQITl8$185Iv4xYf3&~|ZZ4MhQ9Qq#6PQ%hM=3Tqj(Q3a|#)h&nW#|t|O zU$sMgxFZ^Gx{=*u9@F!oRgN`9v2~}=um`#q7I${Cr+)rjaFvjDt@qV^7xVO}x7xWU z@IDLAGjcqr9AxmHyMHg9j!eWVVw;zj!DRv67Hf`XzCEs;$MGY4uTAgCTrM`XDxZV5 zBbYv}D@o*tTiaf*>i0*XEk4TLduB=(Ac)#NZ~kf_)8LQem_?(ZHOk#eo#*q@S39i| z-@7GuJ}wh7Z|3)#4KquNJztZ!2_>@N z)tL{SUlZhq2MMr{1Yfglb~DWW#R)TdsoS6HCEfU-2{T^nT>B_ZPUncji`~4_g@}gxx>id^X!@#F5@yj-85r zd!O^!E(c#@;jIRqul+k2J#?GO?#9b&#(#gr*5|c4(3z0$cnz{$+PDQ>F3rW8XY*7PGr-mC$gB|J=w4&GD$+uNod>OZhy$ zh6X%I?+@;+yp%j0v8Tv;p36*5xI6%yw`E!XJz;v4BAGso`E-;coaNF(b4aZGrY1`?sO#ITSl4G)*SKcK zL#XYW^Q-fvZl4uh_N46|$~qix97a3xJwss->1xhYymt3KsIR~(6Ce-v#iKi`%cmtQ zx^mK`MlPde2S39snwWec**?S8=r=FtbX;-WL(qC*Y}rA$awlaRZ>nT5ah-JnZ@dMQ zZ<_VE;)>&G7-%%0G1 z*q-2>KFk}!8^T-27a=d`Z)o2jzMsjP{AK1v>4WdZ_tq?%2oaEWfy`5s%@liR?>^3h zPS64Ao|(<@!u3MuMc$3;i)mID5Q3pVD1@c&NVDmB-(aIio?EJ#2= 
zg#UH=b~JLZ`0Mol03K?vf6-{oH(AWp)fUDVp&%M{)#q zNHX%VO_S4aoQ08lK_)49bJl_}Y3ZKe0xWH<$HF`j${(e>VVbbCoG>kradp443m$ON z)Ru-0Rd}?ia{)*%8I2f)6^#^~15~5QEp%_C$iiBXDvF#%T@(XmZ+(4!B57kHL|K!% zS3rT!W>E4t^0)})iMXlzx*F#{1={3kN1WZ`eWag?>cX<7_ewLiN98#+ezP#C(V8;R zk)(-DqI)N6n&e;}uLE)$YQQYz4F(Ms8Ot<~v4na`R33Q~H>6iLNsbh0G|G50r?X9D z@k!G^L4tHYSfzE^M9M%(SQk(#%F6tAk?wpy+Ny5l&M#B&>WG?QS7_bH9&rT@)zQId z<3Fg;C5FMBWN&oIFdp&&GJH+}29i6W_?W3}vCvK&aicKByQSp`>Scz)Or#})V6YU* z{ECEzA@6a1$}IX+vI6o7{7WLv{HmexWt`{|B9F&$O7fkq1=x=^3eM{3$mRB7rZEXG zmgw%K){x{kO;r1fP{ifH#@F_eR5$fF$inwPB{vg$TD$1zV^rkj1cl=SY-R+Gl0wII z9N`C#vEHE-!ZX&(Z)C-0w>^fD`mGZI(np~WU;a;6{N z^|OGiCo#08BYz~Sp^7KZNsW?!1*h7u0-VMw_5{KKR6(E!9C&denx4jBY3hF^xlUVZ z#hjNS(CD#@*qXL~2k{^F8ADwFBx%LaM$m|D(X(x~tDRH8~63t6U0rHfK zpi_`fs=egj13pyKA=(`sy9Qor!qeuKeGCf6WKUz}!Kv_&%g1g-_>3JuSi?_s)i7^U z4us_bVM=curksR$&Y#`RAM_kb+tuZ0RLjn?lB?e7SOI8S?2Bwjg-kyu2t>HA!~ubn z1c3vT%thr>n@}PqgJea9eR&|um+A&-A(ZgfrFq@n8|c6OC4Ax zFRg=rrL`v8uAThUB1vh=!%kg(PY+7z2ZBjJ5kvOso}<;iS4hkV0gd_rFEfRmC#hdP z-GMl;Qg#A0oCI4o#e_O_j_y;j)LQw75UiV_7P@O*t&Ds&!nAfLg3zgix_Fc+_ziX27**G#~HTCUf{sQRATE zPe-4E<{_T|ubvLZE@PH;#D)vKNe(!epDZ0#}~;R5H~OdUf=+ z%-zBB=P!|h>`d3bFrn4}o3K%+X^p!;b*45i30&BMpw@(Q^&UEuc9bR9iFS?d5bk;t z#nFfh8Sr7fi!|l9G=&d!JERgkAPX~z5mL9JjyE(%CY%uHvoNqUa`bxUfzOuhLIAJS zw+;>d9xVjH8Uk4mA!OLyhsdB?Eg=OeSu*Y|Mr{4l1d|5WN@r5%D+`kie6CMP(it^m zCA|rs4m{iFdWT=CjievAhwMd=T=>3UticQj8E%9XdIBR-hl+%nD+u8N`M_!S$s)V< zH#ayHsZBuabC=J>f0PH?s%ra(oVW@4-YFO?hT|W*g;ZS3XLYP4mTZx2Xmvt z;;^!*fT^;gNzn}1jQFP!_X%rWBv*q+pc^-2E4t9Ze_AWV{r!xiP@?a^;U!B0=FD$k zKF+V2^}*l`lr43-FR2Sw9U2PQo*y9>C0Fqm1{5f=7662*;Ru~SN#rSDs>8qB+Vi}l z_qf8}(mF$gKk|xw(KURE9$eFOLtdKNTH2Yv{ax&*VM@xT@JEJ@T&K zSF?EP_{3=XKtoB{D3ms*#=d)7A9;G3$USyyfn5#lwoBymjJ|(Uj|7vFa9h-@ue7;x zrWt;f+vwm4aL+@U2kbBpY}saN`M+z{_gOD&mB| z_xE?#nx|>o-;LG`&y(`iHoNXMdmpL&6t-(FkH;E_4SMdax~=b*48)?Q_32?g+mExE zfC}53vb4J&AvfQZ@8uwTlO5)EHbD*N`qLs}cDKOx>#=7E0H14cD@XhJY#>|+u&Jhl z{N1njHt=kA!RPUOD^r4_GxE--)Ae3Nucq3QX|?5Z!`XCK-Q!aLnDxihd#QI>cfWsc 
z>2mN8BU-udjmLxuRC?cOeQ!`3>+xA|{@Ao}tzKTO9~^IciFRopfa7qs|3Y~sc;OukuXP9I}?nhrHUZ*+(~TlHq^baWy9cxcaC9HLhryvWFG8U*Ut$FpMhzNvrSr0culURjt@Pq*)W;V zAuZdX`LBYU@HH}vxhz@?yL_K$MqLT=&>5MAnGZ(=akD6Jl*TCQ>P!WsWJsqd(8EhD z7Jonao!oBDZ_t$1iW?yk`K_#!&mLN+uI@!&+P|y{0&$mZGYv7K$l=SX+Nv~e!8yGS zBs-0ywU}Mz{vO=K*@V?t*Z`D1x|bzjIlS3`PnD!4ayeGIdtEbx%3KHK`2 z(4mzvYFoB7nI1VYi71M3ru09`{DdlF+%=hN;^zd&F`>f^V?y_!)I&xGKW>3+k{xof zQRYL9J3RMn=>)=rl+lSpl{*;stV|LBn#28HWEK2jG@kDIZGv1JscQ0>b^T z8);`-3#b1j>O`q&xT2~xtF}ol*Ud7Tr1VPr!zhmaLK|;}!ulz=_@RhHF~+sUPbg|& zh*K3w8jcGKLYU=C%%2tR!#Rb7`b>DY*|}C}FSi5S+W@c78eOV<&!0~mq=;Cj7fqE^ zR}CkhSKiGr+&rv!kvy*pgL{E;_|~4$>XqM3>e6Rg2OZ2YdJbGunp||eo}G?m^(*E} z=eA>3%k)ELSsuH^BzkLB)63;!Ln}YTMt@u z$ndrH1>24(UQW6>`2)no9673roGwl{f5=1>Fn*6V$m^V)Q1fxyc*ZVQabb- z1Yv>{-egu8>sgT;R&EPe8J%8fW$>+Rg>V_Ng~1X9pSNrgk#6Qt%LHEe*i^2Pkm1s; z&Ow*JMh$C4mkj`K^09`U5MwAF&~=B5iMZvOj$WhCaVc8jon<1XjBxA6&d@loOadEK)9-7cv|wGw}qenwwQMAw&rQNhT&fu&dX6&7!1z zdS0V*?@i_TPd@P(I?J;mZ1tw05Q@s77ycZys$lCxRwX#;PL<1f%n$2R)zTl8g+)*} z=hv)OO{HS{UZ!J_+e8hk-{ie*Tq4kKs@NN)=gbH#i9e=Nm|i7I4xj#OvJKMgF_Y~{ zFO_OFiz||AA||++-CUiX!#I>%4t`*4I)rOCk0L7ZwlTg`wP+GxD21PBRm%HK#;KMulbD+v5Y{iMV?day0 z#WmXy?yb_diFoKJB-}Lv)hJ(aT6qb=VI8Xh%d@~XQNUAHZBxoi&$Aw>T>P)SFeQ@k zHS>|*vRfwmy=Cp;r!+nW)vDQ2zafcSezh^Np9t?5^TaQ)Dz*z3P)dwA_DlO7c4mg5 zN8zX~vn`=L3cmq0vdV9p7=W;)l6M`o$=+-CW?}EnP__(Bssk3SmQ0~7kRrBVO0!J0 z#>8&5Z^~k=%mI=1x83&W_|Paw;`B4lZ01LK6nS8qkJOSR-O8EG3)IjOiCvd}Hb{~p zjoR5JtO{;tZNx#x>|8-u5YW9X)|0n>{RIsvMV@wVS)oi>pN={QQFe=gl1FX_nnRgd z(}&+aXMOnd@y8fMYC>F~bAQ^Y9L}-(uo)-G7`6XIyXS0qC9`5lpfd~&;uaw~1HLQ^HMpbiIFM}GHfab8 z#sbP)@lj;tXjEN~2)a9%sC#IAH)O1_4X>TL@itmCAPd5budj!HJEyknsmCAXee7#? 
zRMbY%VTu@|6Qja=T)1@R2Zft|b~EQJwvqPSh4w}aOi?4+**n}qDx+lyYyPuUJUg(& z_=zXBE01~2SS@ffsxpk8mma(iliNoR8Y22&L=HWqe!$*(J`i(HL=c?w1a8O-UA8!B zO@$O=9B4U&F2&FGZ@6g5Y@mklB-J%onv++iR$00*ny!lI#L1G-cwaWQ0~#%5OBsy> zmzdiYhDW!z)&*o(9jez?rrSj48W+wx(P2wa+tV&e*{%r8YZq&Wq^XP5In*-h7a-7v zX?gW2UCeo_smS|LUosJD@Rkx)*`uGS_a3wRd{f7rQ`H7=N1V0VkiX`cQ}I}m^c?sq zBnIBrR_#8xjqSd3d_QXLDs&o`bU$;(?aI_A>9YN%ELcBS4YGSfdGS}iv6XxfeFHz@ zsz35F-VB;nK^D+iKR#P6D5Vj8llMBIQVD?oHu zu*7WK+=Oi0-$FAoH^&s&8A8b-8h}VjYgj+z`gf!j3rxG*sAtm2oin9x;t_xVYAt>^ z*22*mf_<}4rj3eN(3NMebbHIbU-pKe3!l?T?gm;rq2C9PKIkiC>MPgjbI7GY?cr^E zmmcJVkUqAsUNDVV%x?T_TKoa_qv@#2KW~pynydp2;i|GgP8zdbF${&3tcOl|WA{@0 zKj_T`J1jP!Djry8Q&r&&&X$PnPna*Y)CP*#F2HzpGG3Vzu+`y?WT%eYyX-KSbVZ7ogBtYI+ zkl1WR;V*lLUL69%`RVNq38Z34tp;U#(+vL+FeZswVh)+*!Kwee`9$)fFO0K^8taKQ zx$K7f2N@VkM$@cO&T<@|yl*%AC!2xR*dW{T3kznpA2h}${Nfcze-*zTRQ|QKs#Z*~ z9e#wi8*w2DP|&@`JoHRu9m`W5bI%xjB?2yJDkLRjVC2yLs}@xJx;qY^8wT?^)H>k7 z|G~V4MQ(InI$}PmfJF>b%27PoR6FswE)GBBk}=5jS5XF0F)`3W0g8x7cs|i&4((s9 zE5ms*u^`@b*jQq}F%UkA8H8cyFB&-MpiAgdAC%Y3%?ZS^G7BqyTya8WMx7;V*H!`% z!)pOmAs0B)5_1Z4U-H&{0_~jyoRO`mI@gyG+{qP zB{wJG7RD3LsDX5U#oP^9{=r7pK_j1V80>O7M*vB;8qxE-Nc84J{*GTq08R_X zQa4t;5(8WC%E$YV+F};rQeQ(UhVbTVqlY<`y8|eX>~I5on~|gmv;uUIOAyNO zl;|eBx;3my4a_I}8ov=y=PQK)%%?OPskbX?lNYaEtqje9&@@DH$GKU`3zMNh;SHyc zX6^ioMZZQ#9K$zh)5m!}vw;c!<(R z!T|FuU=R`9MPxhbgYPf1ZoW*c?bbwG0ay===YtomvE6SE!Zu%?ksbL2|ETY1Ww=x~ zT&hYuUuMmvZLna!zof=$zZZdFcIsc=4}0GaZ#LLoRf4;sUzX!zDjJ1JXDSTiJs`_N z96#`_F@Jkvu3=-J0H`jr;<`esSd9Uw5(a>nTldMZ*Q1F??&J$1Eqd~4Zc-Gz(6|f4 zxQjW)FJLfUb-Ej1Fd#M=Y5suqP<^m#K1~`Jd39r4I4+J-lr_VS0-kH5#MjhTR=z&1 zy51e7zBWeU=Jf12cV@EZbSsCNSLQ3$QGn;tS&`07yXmowCDA(2PD+Yz&{mu`m0tyR zYMb5fv-?rFq6bi6p&3M>Cv3%csuA16rO#!-4gtt-j>O)spP_SNuf z#2*KTz+d70yb!2#Emqj5&+8P|tlIS}WCE!yP?Pzx_H!sNl}b18w&e8!zL^Tu5k}wku=rm8ug?6A`EHe>mKI1ybK<8DXyqtDOLk0)Um-~do^W!pd5f4^ku=` zmO%8_!#xaen`?+**?>Csjs)m>X$;PBU!-U3gr3;IK9PqHTrs{~oW3c(lh>c#Gd@E4 
zyY;*Sx1iP#Yes0B`IzH0yx&^lKB1g*eueVspT#Zw`vET<$xXaHkgt{S5xo$mKzpuqS*>=bZ(CA|59{2bHxSC3zjq!d%DDu&b0B`Qn7q*I%;FWN3k zuFulI#EY7sv10wu`U2&Ye~vSU?}n# zzRTKeU@}tx9a}AOS?@WHUyj#pEs7^94a*aqg$f!vX%DXhN5_qoPe>P$27efaEcHF+ zC+>;i>4&|(#3$znhGuL-QW~ie{H==<)AGg(j>rHx^8}{OFKfpXyuOv%C$f^BK}#x> zA>)KMuYp3TQXzg`rzI(53dKLD!1Pb%3q}wJf`l>a2147>@j9T z+y(1KW6Ku0lL2Q1a5*znu5LD}o_N)K1g*Bc&J)b3W_xFssbxGh86HN?;d^zKm1hWg zITUOer%HBDSVSrR?8JLm9)LyqtR%ni$AAZg`-%Na_4jO;fa*cl{Ps2=CvRr0^nI%L zCITE5{D1z=Wt{T3GYU{1%ya!X_npag!&t;7xZ^X93j)R20kDTh3%tI-y4%3?C!}bD z-$W6+P^V8U=C;$bJYDfgSkdlVz2}&1)9(~izQZT<25VWV(>1^T>Bdl|hnsZOULH9( z7K|A3`hu&2Hmo;5IS6>hQQG(;6BcI37O4Dt+7BFUNIV+4@b8K9?1c4l$B;CzG!jVd zy1i6!ixGqMJLyqB~}((J;JrMI2FH?o|%C1C*d_X#MYXt&K)TGy_qrBuISOx2CzKL_3l^=st- zHRw6LU(w$lc_@Ne=cx(wSNpl)Esh+Bv(D*JsJ?+9Vaz=04a4k-oukjnDP=ngP-k8} z0qjx(?$M!)1IIB7_LNmy&T{QZ4VeO>x}^jRE5P_vholv7E>RYzJQ;Pg&hMWCbL&z{ zDR?Q^N)Axvbj6gQTxP6zzmWvEgMR7OcPH<0rjvHbPZ6YKX@ExpWZv)SW&Qo^|nR% zaINNE7GZq0d81)uE?bP|qeiq|IS5tDDeh&xsw`AqWn$ z8S-*h&*v9%=sYdukm`0Qc?(&JwA7{h15K5pPb5nc;)?1a9q1HjZA3;#{*A zP0g4I*xdEo_o$2Yp<7UlsOSH)ZQsPE?#5Kd+bARF_sjq?@LbSKH_ST1DO?H@?& z^+ih%lQ=opZzHC1pFPvk7g(})@(!&drauyj{)Hc4PIQCibnD5roU;#}M@R@o3+3vU zCJ*UoYYR3G$@@*xvD*p9`R?6 zVjg}{=*Kg&f0|{ra0~v@XvnG?qmowJ*vwp|yP~4CP^8MI?3&m|HZh_%IxsE=Rgio4 zXh6qz`%2LIfU>>-0q{{Rw#rcMo;%@7K=QGbw8%(1AJP+L_Z&#dsJr62nKKvHih2=b za2I?h_$DnaXlK}@ZA9dX3Vl(l?FHWZF)T{hwX^Gm)_lTa_hTP0|U!GbMHT!Bo z_065Q|B5*%d((SZQt=^`2%Oy+vH+l7NX4(Kqn_7C73;kMOTU!`c{^wn-ZA5z_NNlyR%&De#^-PctxS!Y{qR zFSS|_P+?Yfo^0Sh@d#R`RW#`NOg_SMqTr+nh^VqU82i5#N zNf)@5<#ty$?wZ#jc!xpK!5mNrgE$q}*0aN_*#7*esG4pTK+9+g_~UT*nGQf*W;cN*i^CVLQFm%5~-n3}Mo zs&s6kXCl1bd zJ;b&3hj(xJr#cFgUW^Ennxo=lh6h=?77|ay3{*KPb0KH;{xlNBPT5L=Vm9a};z!uK z_E=6wgPm@$I%DVa+`&~L(k#_l_mM=CFuCIpGQ<+*Dixx)JCyql1i_`)$RH;lZczXlfhm8by(O7QQdTU+N(a5 zR2SQgfvHLB*M|mHeFCJDi`f$(vSd}*7hrcWkQJxbj=An}BT9OC6Fpwd^XrX# z;MAs6LFe(|N-}i=%&|*9_)YfZpRpRgG_|e+5TfaOpKqWps7>Gjk9d=kn;Gb+>CG$` zwc-s2`q6K20!0Oa@(uUWuWvV`ZXM7`+Foul>NlVUL<1^yCFX$ij4g7U6dlwba{VG4 
z%{$se%oG&cDWgVw^P8a?JO+2|gJ7UZGmHB4V50cRe$HNle0Xf~3zoTT(6MlXLB501 zfbgem7@$&`eW5EOf9p(sm9xC1bYKp*pn#+mbgI|+bM=(043;oi5YA=6;GR>pNk$3yB++9|@+q`qVCF{+ zG2cbk$a>=kbq!C9b$EZq@PQ8Ji9PoVer4dVcZUHK3=-O8NWlEO{A_&0Tk=b85XTfV zO?&Ri=^bP@@Sn|(XmP~c$2{iKYxwF>h0-?A%~XPRf7NYlTd!#J=ytO|IHan?4W*0^ zVogri21Mj;l<(9|%>kEQqHsyZh`?(?Vb=IQh!O{4>w_1<2sNm`ti`M8^4JzERQRdi zsWmltC70qDklmLE&LjE`w-|DE5QukrM{lCo&-|v{xHKo*;(s2yA>SF(ZvUu5keA1J zdmz*|{8)*oWrvH|)6xS89%weDuBzERPuvB*arM#P1=KP(Xf+s7&WQ&q>?NaE`i~h< zV#^R&UcH#&T0mbDqbP*C8W96i*0v4NaM3#(1W{FO3tZc8juv_GY;#ad43uw7iDZFa z!PQveFABWhp|Dng*X5=n7;jAAm|17utLog7FR&%n4cpX>5O~P(gBX$4IGH{WqwHK0 zYN7&XbEFU5y(BY-_qhhxkY{8Tbc*I5_Xl!47yp|1vFkL+y>BgGa9{D_#bgqo$^KJu z|NB?EQ8FOeL0DbF_r?Y`8_*xXJdOu{@}1V56=cjtV)UCss%hr#s^Ir4@W`kwUMTBG6Aw~|SUk7CC0rNh0$b2r1AWrLot~t1VbgW_4Nd$%<`_4&$-wiX8 zCi8xPOOOKO?Jy!xhxXf}zt(*VWqh zVfxID7uEzJrDksKID+YNluqwB+~Zp{o5;VHfocTDbhdm> zS9#{jYa-G+htXI5W%9T$pqb*<){G{!BMmr3u|={s@xoaT=sYb9uIy00@GIk>L7(#K91l+UDfl7 z>5!*KsMV?qKRTD>ysYyiXGr=(uawwdNG(eJg6nZ>@)GkIa2AYZf^~#7E6*+p$q{Sh zG$_x=5(N{50*7CqI9@?~hZ7}K)Cnu9#zFpSpm_2(ikRr0_-J#GU2^>@#(;8)F_+ciF6Kz5RS?fgD%RKqM4(p(Z z*C3Pr@$8x5RmNWD*O4+J=6DlJ4|{<7nLwL7>2YBgqS_e8k$wGPs8e;!ppVR;t*ph& zyR)R{l@4Ja9$H_g$n*xo7fN3z56XeB=;1f*N#j#B)bSQ_mM76&kI3;i^=HXzm&2E9 zl&^ReHrBJ$E2yOB7B5yBsvV-#XBt+lB*)N;deFcKF{j+L%Ni=VwCG0LqfNcUmYTuh(w(l z@vmaYpK^&$+P=7HzU1C71P731a-KJ893?!hdc)vtI94ZOw05ylMPI7(>}ejk`v!@I zn-n>|zfXR6>7q%4#k_7|nXYj=eOO*cq`e9!!R=It_KRAiA&SSH(oJ^-i8-`aXh$6b zsf%?eH55CfM`oj`k$3n%nGTevUn=7*5+BG++4)~9zpH=f(ISUhg_&Xl;`wG zUN#Btl;e6ScU6*Zvgr1@3EVzKL1t>XzVks6P6*e*7d&J;uJ;yZx)CiY4N|u3Ern*G z#abmQn|yia)vN`CJaxx6W*weoX2aXg`*CRvD_5(@XDtWmq`m-$lbvzB*N5z^BQC%W zpR16|1T()8bGYI}(*1o*MG#mKSg>0ab;J_0Xe*uC!sa%HP^XE(93lJXuU z$Q{>wk1!T^a{GQP%TCy4_#Q>G?I^0cZaDc4s6EqFmG3m%E_rid(rk8o6kMH8Vm_p} zL|31&E~qbkb|E)V`CO%Mx?W!rK4m&RpE7=D{=KT$dhq=bY5klN*(7>$C$15Wny=_tEw7)NTWRx0&yY%zh>WO-E$W^?MvNXHg*fXV*y001UdFL%w$Y#&F&C_ zZkkDVs4#fuMj2YOQ{C-+oSwxignVuJwNTX3-mZth8@~FuvhBb8W37@Qm5)g$(ldOh ze7L0o48;L 
z{`hFuuAY?_YgF}_&_3#@yI=bss2AvmJ8WeNZ@s>YT|>ivZa*cPK1=p^#^p6lo_#>; z?(Iare_Cy~JUT2v?4)%O+?99EJ++m6DdE>#V)8^T8lS~l`fiRNp^JLZ(yzp5a%_9( z4K0^j#C%qfxNBdAI&W7H+2k8#qOj4YVf;1899V1>*m9N3uN$qm0^}RnsJ3?V?wsV+ z?%bC-Z0*@^(T>d!us`F3zUxUehoGOyHgLZIusEGkE#A1t6*k<-&NM~-3hFh^OHxl2 zgZe^Qoj501%WJvaQqN^@dJI7T_Z-x8|)?!8V`ZWanV0n}pPDJp{R=IX}2yYdOlW#E-Q!8oGF_7PHM3kLMSB#`6D#vawg4mYA zC}!2IS@}@qFgmN~o2*|R*$CMwfsf~_xjm?o!)QJ1Qa>W>SyF;#v#q78{Md3nd%cVO z$BpTHQTD1v@029djc5L%_&W7g{oYs7buTGM1G;q)q2;rYJzvV`=P=Ncb%o2Oe5~oF zYL`2lzjeO_Ox{&LaE3}>X+=SPz^7cXQYm?{q3P0cq?TcEZL7K8-Qn0YC<8;F)NktT zh0OO^s<&LlQbm;$SYoEk)ye350(i2qnwvm^0f|IR>+t%mTlFC%`kjH{M$l|!zjIxg zA2F1nbVrE6tNU>yQ3(9qozhIQBH@17dzoBnIO4tI{^z8b8(mw0^UwX__C*UT2V(^L z9M*z?yQtL}pWI-z6b9ms*JASAqqd?l=etaGWHikf4PVRsYApRZQw@aW*;*L~U6PI&Kqh0JPlxF05f4YG z_4t;uu&2@GWAUi%6#L!PW@ZbSE_C;L064iNQ-B~Wr*gS$=877uCd@{{v?T#feb%x2 z*VV)nV&*3w*NMi;L&MkZ14Rin0w)H~R0Fe?Zv`J_X0xuBX-90=Sdl^g z!%V9|G;}O`i9DZcj1t@)yDjm^t#aA@>5dPT$*{e$WYn`fCBwD|YluA@x^s-~eZHtv z&iURV9}_|WZ@;>Z8E0lk7DudRAbI{~PY`McMbzYM=+< zM29@Uq%++E(MasP2D_&4D`KiA;7w|HqSTdHSNM}-LQ4T)=!|+UhmETMFt+CZgw8mD zJ;#qaQL;0`<~g^CaxUXD0QB%1stTK8Ytz4q6 z(|sZ_e8;I-u^Ru*A11NVpdeyoh8}-^iKi2;c)|O8bDr931y` zcJnt1$`mwiHGUBMX!tLVtkq~K;?r-Mby5>WmNS(pptAayLDkDfeY z6~RW11W7u3&%IO6rNAPW|54m3i}r}RnFlS(p-pPsfC>!}b<@bBNs|D3b<9#oh?nwX zO{atpyeP)k@Q)*=ym=E5Dg*_MM*Yw(-X<&$aOr&6ILP00&I%%rzus?7zgZ9`4452* zuFdi9gmuk9-yo{Qx*9>H@;@b6htA`EPlWa$|9AI^V0tP$m zX}$;u2gUv}$&S3fu+R;Pyx?{F(eEp2hfx1T@AkL4#sA>yj@7c~@`>oUwb~5*?gxFL zCFmm*08SJ@C+jy(g~7SOHV?pyOC%@TUm*>%Zlh3in}}|xvlea$%tMKePC0R+O~W=KUo;+0-VPOylV+)6 zXHwv4o54$_OfR;S+Lx4^OM=oEM5(w7$k!e#l@!%WUnodp&y7f3Zbf1$ieda^GN+)j zpmw$LZ?{*PH7#s#5l(->G2V?SUY>=w3}qHsCpwCZS>Xz(HEdYMYGl8^90jdbJ~H)# zCM}|D4K&!EW0!DY1fOIqAY`ejQ!zQL*Z~G7`gk%^o|Cf-2{Oi*E}zoqRax1QB28PD zxrydhEt_POMRsf`UxW1Dgs73^bTOri%viC0CrQ=tJxPkRV3)P@95LxL9l+2_uYfIn z#*Gx9sMPlNf@Zte(0N6Qm#-A8M#CG6{+yy*Dw-NCSU13DudIItXbs*0dkVL6sx7%> z2qrrPdl~5p4Sju}@ycLOHUy=RN3f%|M3cal;3|(a&4fxrs2)?=B>4owSQMy=rgk`2 
z2Q9iQx8m_I4O^bzh>{rg`Qm@*I;Y@FqjpWlwr$(CosK)UznC2-9ox2T+qP}n#`Nqx zbMQ~?sd`V|<5kb9r`C;&Pcjvol6HC+oF<1w=mxe&4YN17sCiV^t?%q;|4*aC{IN5a_`Auj5(EJJT`q_U1TD=0+gyqu zCAC>rogjrMMmBbYwczAbvzJ+7jA5@-3cJD8p$6ST!Ie3Q!T4V>L*hWZflgzDrrxpV zzg&2T1F91@9qTZkKd*iuui|cZ57bM(1H|q$?gJ3=9R!FE{)8Bgn!1z`IV~(hS{dsv z^1cx@W*Z{#Kpk|U!*U?f6TBlGHpg+7$bWX_Z=zcGSva!Z)Llb>9O4AjE$Qn{OGpKV8A#?X1rxcy1PG%FKf|r+2$UrGD1fn5%dEW|LZfr= zC0t}x2{-sWJ&YI=s2uE)z0fBX+<=GXAJWc@AJaiN>c}3Wlb*ASULUAA^Vbl)_92WK zFlsM`7DPf|jg^pMH9LeBa3}qVr7G^BItx06{F&ApU-`s7Hm&f;Ksq6{i#J zX0%!--BT-Yc!Xs2+uAen=$vXWa9-#}Qj zmMwjh7-GxtRQ#bhRIP*=xYL^!nI71Ub;Qece-|zuDv;10e4tWDmSylZ4~S+O;sK!x zDM7TCIX1dVPUsqaqRwqHFMi_9f4tb%Qbc^sMpgK(g-hkXMiO3a3RimUg z#!pSdGsW}>lvXgx3w>TssyZxA5{B&cl5c;E`-EkNn`}*7Rhog%(e{{(g70FlIi4dP z=85$sFb4}ALDKP%Sdi8cmw&UlUpn*?-Tpk1DR6_r;!NTeoKgsPVcOn?B2@xtL99Tc zAHn+Baql$^)HPks6(na;Apx&jDj%n`&94F5t3yNw=+%<3N{aU~WnaON{^p^!a_DjU zeQX=FHX83bh>y6&fVhTUU2p0qXCGs+(;%p6!MusNxbCgGydU;w20m<#No;-hvR?x+jyVwz43(=5m7q=XP$2gsf6 zYXyLXkoq*-2U~5SbRf4k9~&W0VQmacRWQcNVo`0jzugvOC=*GEKFR|KX|hqI<|REW zj^2vs4>4^^TGE?T!d7oIoXed@Ypq%elLb^gZIq@xDj26XdZNW7MrE_M`bLzdQR6@= zXaQ30D)yl(Jszi1ka5pzCnsUT;iDn;@Yuk)F0d^mx7s^Ac5+MKy+0ABj)!xGJ-KaQ z;~NxPU&n12&Ca0R6s&a)(^pqL9@4fj zAo5|gRs<$Er><9>Ky|d~y=$7L7jNXw3%hlRZWOkf%ug>}hi`aXulq)-&wO4EH*cIw zYn*jIzBkP6x-2B^xDz?|V~Vw_`Qv2Xv)ymjIu3StTa3%cxN^U*&eES(>t3{zGhb>B zTwOX{+|M~Rwx4!ICx#h1eI^FpX1ZNJ9;{-J9A;ibS+a&MH31Q_{bH#>Ya+ z6>a1EiWb1K%W1pB`+G6kT;#?+S_5MWv)#XT^y}dq#B2fH=ktY-Z1;vcNK{TmIp-%+ z6s8rc^@hRiYO|-p^fBZheg%i)7DKWW#gaYH7_7vKKlt5s6x8bf>Mry15)u;m)541G z9mo~-yeO*4;<3pErhET|>$!Pe9qe&=<=o+jKjySG_(un4)x-dvBe8%gICy zI|=Y-J*Djoc4uNdcz+i301F&ZmPXHIDsU_)jAAk%Dn;~Us|flnzgpo9dEOxr43l#CC;T4L%pBmEJ^+1vcx4g@=n1qN z9=WZ0#qh%0iB{aT+vUCEyyJiEcm?(n^uZDgCLD^{^}MtE$3aH@L=%X}85S(I5}*e@ z7yQq)IVL3f66arW$Ri675Z?c^Hk+F|+u8qj?$Jxr+F5%kY56JX(s=Jc=WW~}+kOC@ zj3{DL3??oUESjDcCWef}(fx0gRDIQ8GMe->C{!z%kRM1f@OY2_h+IjQD-ZnHuXo$` zQD?vAKn-n}0TVO$AG0Z=If_*sgSX4<P%I~mI!O8v4#8k?lgZaxYA4;ECFKjf#_Q*x>g__3Lj 
zi@`^U)gs7$gEpDyFl2`ID7zE4#T%fGRSR@6yOZQ@V95vQsZ74&Z{y6Y$)=6bv zDXe<%Yl7c|y#S}OqORij;X*ruo zHK%EtfXru1s{^v;%gk;H#A$3gx&8@&aDW9Zn)O?qSCNfI8C%>8S_h6{&Vcg~_z~<7 zfLt(*ahauq;+`$(z^+wm+C0P>a^kI7OIm=x0JTVNDhqf-%MCp|ZCqI45PP^5Pjl8{ zsm_^goosH+f-AmiNelcIi-$ty;~{BTHw`aRPqj4u%Wyojz06KUvg_&>JbVc2sg|FF z6VRej*>YHS`_H2I6c3segIELA1IO~_mEC!=?XME^@^tJ8H+AdZOldOHhHkQXi|4gn zDhPdzf_TTQLZ&gk6* z>}_7Pa_+{PNkS@>wQ-kVse`67g}P?jy(9|SzAU88B@*CE(517iSvepW3YoFo$VtQ?K(jV zhLfBtN9v8cNZRzOCTCqMs^NEA7w?wsJZuq{jo=M;s%C{N?GhZ&a#_M(?5wQ{`ncNW z&sD5ZzzapNOK7%gskR%AxV(Qa7Mm6(BeyA=OJnKc=y}f;y;IRgcneM&MPhWsxHi$|6UJmUYkw0E^J@>m0+E~ z1Af687ZsuLkAv+K*3V{zS1h4w*c}~LKMP7yN&5R2>0;<8Lil5>H;h^HuzSk-$d^3# zzR%Yd)=S1IyNZz%W-$#&G2_(X;oo06 ztU6)Dk!=GcOm@FxypV2Y>119t&-OLXjQHc~UGz>;cLZFz%4=;u8j=}m%%}i{k!}~> zuLW|cqhhQ36O-^1`pc|WPj}e~UUe-IjCbc=W&IvTUP`Tqz7io65d91;M3rqe#r_f+ zp@p&|)){f>1ACD`(jqoWsJ3c~WbF1qmj8f$+KYQAB$3UE3aD)GDsbuh186Dz7^Le} z5c;LHr9Mgw)1{uXFVLnGY2mYc(d=>sw@NS5*24?%GI45jM?8hS$`zm+{e# zYbO|13MOiC&kG}}5 z!J}zLQ3^@^^znwRXz>(5{-NfnpylWY_vnT<`9z%ZEoNBI_&mALR=MnwbOHJ3j_%cj zPK#_&(e|(dDMna$cn-)hogVXwV*ehek}+E>7hqTovq#Zt*HBv9uyj*g

4~DJO}c zXUXg;>^B(afstcEZ*!TfIkah%YQWCb0@s18U?)tIt`YAi9xwxg~^hw^Q*$r&$=GaFE{oWJRy+kumZKLkO3>pmPI;@#f-brBY|o%ikEzgUE)(O)4III-@Wa2GSQ&i_0Y@)cSd0rjeyo#Jtbv)Zz2m>_D>kOg+( z(UdI@d!%FE6ob#%z`TQh5BB0t56{OxO^0}H>HrlcP*=2KWe7KkS|wsFoQ8OI#9pmc zX_lq{k$4LZBvh+$U6fe^kjt5$%r8+{PzP20|?Tc4{0!4m-zlUt{j ziOwPBnXwKWm^cxRrxW`&Xxu&BeLBM$q?NrjMdEM@p5+fcIx(SeTQ@0Hop2VJTZ7{?E8V0P}I7ie? z`s@SD-S{QJS7uhdi~FF(aR~QU4*UNc@L#qI=u`EhL2d~YM0O3Dm$8zQrvfB+&l6QJ z8v-B6;kUSIcWvQeoNJYc&2p>j$VZ$jHdldFStb+U+Uhv?Qn%BMbHox5^ICp1P-r8j z;KJ01l7QyCDIS7D%UUXO%|k^9ly>c{!4R}$i8`fKq&D~*2OeSW?X%pqyxI)XPYSZ6 z*-fK{C?O|otIH~8A6WHcdaH6$Q$q&?Bg5e!oKkIamfmv;Eh&tih*T$y7$6(lkIbF2 z+Kt}SjWc&lCGS+%DLReEL!wp?gI!S=3_D7JP3DP^z6aGhYjVPC`&S_rw|O5_BFTCx zRX1R8(;M52y3qz==hHjLw=WISR^T>J$g4FcQ?Tu@@ZF@U(cWGm;!q0CZR` z5SS;Ldg%SuwGSS&u@|S9+k_M1cwMk#=f)sM6L=toVNUp#Yl9(3QaV#s1n`D8!K3GN zh|j*)X-&c@rHw29?2FM3zBM{fkb);K)R#ob zZJF*d4gJ?cszw*y{T&1z3pQMTXfI=@f_F90YF=LMAr}uDKeOU`MviWv-#lqXOP0uf z2>_uM+P#w?K9r3$Jbr^`Tm>czSp2C?AyiMu8PhN3M$CCQbtLeJCF~ohsU2`plf+Z2 z%)mihyEHpp_mdLDJBT5gkC~BpHw@x8%7fP+yulVWi6L44-yiiQ)h1x+l{tw2ywh9P zFoxTXk>>KQy^L}F62oE#>Pm6&HDUJqvGSP`?YS_BqfwCl?G262AjYeF;OKB)J@EYx zUnjwGQMoR|X@+IwR&brMJ6jb071}!E+NEo1Cqy6ja@Yk$M?u^?FLe=yZ#12NWXBHc z=yn5)Cq&s3QAMn~dhu=B``CPkK$(oS8_(KD{nT~9x%J(zL9?$f2^EN?$-J-*$ zD*@x`rR^kI>YGubuiCWEP)~k~_Nz_IS;IrmzTdZ1U{CI{@iTh{yl69Fy?)HS#(Dh+ zaYuX)m}XGkcTZ)Deo<`-9zWFAM0B&-| z>WrP%r?y#$A+)Y-qTy1T&ISPU{OBe9TBif4b%%$u$-W<-Lp+{-vF>1ei1RjO3BoI8@}%1b66g>CrSM^c?^paRJc{3VOGU_EX*e zJ!fWY0;JvV##@;VWp?AlcJd^O|*o0P%$L8TFt=y;${`Fy$)xWY+0} zsDUDOsTPz~yGC#>HmG+cN38YNgmKIx7fX8^?0SnMqjRQ!S>vTcKMUZtCFL(O!o|sQk z)>T7ld(SNi3qPG(UQB&WYsGjjldr#$;IDeb!X6fR*0`8QDd!7eTRS1P?W8{qQ{}f8 zJg-kUbwDSug_N2Uj_Z{Wv9MWabCS4LCFeWIvuh$d$$Zq`JW}tV8S-%ihX+qoKXKb} zGEWz0!tW9XWaQVF6!G_Hm4eT3q#h>ui1 z%u+cdZWnH2p3*R(s+;}0h7k%1??p(Vx5iL=BG8pDHU%>rf^}=et)8HKKbDa>64Ja| zUS-UO*kq%={-VZ=KWUxH+R&{3D8*sVNb|EkA)ACg%M-rQ9WfmAD0{=ZpV)$!^pCGW zL%!%Yi(-s|;HF}zsHr!S;^%V;b1N9lSdO^TtGRXzQ(7k7ick0Y!T8eex%YmFe4fH% 
zJs3Ylm3|A#R)x~#mg6-@kdXT|8ALvLW3%zWdAtmf4=Y`tobp_RiTiRN8~46dhny%^ zdp$wVj~?HsFqb{6mry8e=wYAq@zMBa@IhY{Qv~@6WbaW5v9RDlQiwhRk)}@{SiY#x zzE0P&r0)9z?OtGiZw6nrvAkrfat2O6HkHc38Vyc)@(I{=I6N0^f;e{j*anJfIrCEg z^7kN}fCTEkVe(G|ia*h1q79_Be)|v`=Nf zfZ_x4^v~$`Y2)Yd&x^+?L@CD!{L)MO`W6E9lcm+OczUn=z+nPCiJid@&9ni0oJHlU zA)W+;)iZgm5`F)Y``HCctQY2FY1W>MLqf>SMdw#G0?o@r!p%Z zA07j_WZm=5xV*thpm8k)=#E0@Q#R5UDwGgK3>GN}`+)Gw`<3-{!Y0wzWL|QjR`knw zbnFM{n536a{f8mLWMZ)`Ib%Cbc-uG7q;GYE!xLiImB=FWVub0r&dOPz^QaTqOrbtE&pcf{@sWI*|PMqKKBtd!y8*BB(~2t=1$Rm<8!dleglrv zwngci@J#gc5=y(WX0-aydo>QRM2 z=DLgaw0s0y)V+D9{d{b@`R;iA<*1sr(5sOkuTW|N)I@OnVSn?+w0Wi>D`cZZF2jjR zBGHqMQY~;OPpl{<@sO;;A>q)mr>Na_nR{{~L+(1s$K`L+gk?wWvI`TQFxJi%GW+W# zBu3b30o!4PH0NLIB~|qcS3CR=@o#)YbM^FTT>y7^{*xGv&4%({^@#NOcczZKzW8!F zb`!M%uTZ6WvKsV9ZGR9yai74)-9k;B&FJwgL{PF~AtLvX zR=))R=Cts1fk3L~6th7+p@%_mwjw~{&Z{uJd&eH&_ktjIb@5)8`PxJ`;`U!*)V?q; zEz7i2iefnm$%20uZ0vDJ^JhZ6&=gt*EGt8!lSfC$G!2gVX-um7mhfZOx!j40G?C_* zOWz$Zzd+>}!54Bwv*=^eEypRv9)lvTO}tdZDEq}?X$o`s`Kf%)qo;(7m3*N`i$BHe@2CNBOpVcgRFTOOVcLwX$eE^GZ0F>tHJVlLUv`rO_Tt{txIchi=>Q+V#i<5>`D57xZ>jBbfL!d zrHD!DQ+1s=FG3j1XpaqiUJ75XvrTTC5eGrP=KUL-Y=stuFZHPQ-;zts1H%xHmu%@M z(kefY^n0xiyICT_W`<>j3YF&4X>qg{rDT~><vbDp@2vt7kC?{%_kMu}9+obs#V5eoQh`CHhz^+i&Lu%=j2NpW>K?5J z#n6NALhR>?PU#JyIsd0hr3PEE0hbF)M6}1FK-v4_Ve&I!W*)a&a5}*Zss8gNtv2Dh z?6)|GBaaP4*eJbRn6yR}T)GqEgfKnrxqb0;O z(|~SRJxZt|%C3rgc2cC}rj7&TtDeXZiobdO#-2qO+dVzBkI z4~mLq>E%ho6v|Ki)))C9KHR%^3?5jSoPV8C^80+ru@C1Nv6AsywJw2?>-@?<{Fn;?`Jo#%{q3&fG0SR26m#A80d z3X>8@u=?{4k;<6@{f(3h6=+Drfpre@Db>wWFF?7KtAUoo>2wlA+9R^TRG95e4qelUg`Hu@>twq^hKHNHG4WJd9*dWUznQ9;&RIx6)A@G8P-{kR_H~}lXP=wsA z4!aa+6bYK8=wmMS?+cI)eJ@qN)JV${Vay#v!S<|66dmRyW_5wJO|4P`ivaS*?PJk^ zM=O)_-1Zz=>s&O1ILBfgfmCsINcRmW*)E}fiGfp?+Er-pYyt{%SyA}}dYj$qX^Yz2 z+1oB-S;*oc2ucli`8S(pPyt5`prNo6{6<*kD+VsBb8GZt-hhI8;Hu0nlm3RfC$U#o^^k@qCIg^M3z z7hbXQRHLPsfKz#JWQUBt-<(((SyLfz*sauTV|W}4POGiIuu`m)YMLpENYX%ADo@Y^ zf-a-R#;cy|?Y^e1+>^4OsIA4kt2cnmP-C^^Qx_>)bhyh@N`To_%WE~%78od-p5c@! 
z&zn%Cv$9+ynvLo(VbyIw(Q+xeqFwCcp!t+)5aZh7X`n@neU1Eq1lN~974NwH%t1_n zOhNwmAt6Daa0`A=y8kk>zGzl&livfynmBTn;fxO~2&G(KdZ@pKa@bORwM9 zfJG_S)UvJ#ri?H_GrM@>8q6L#5jwO*TAzi&YaMOEwz{Ww3LDo!C3{7Eu7AeWPTU9_ua1I<-jqc@<`{?-L-sX z?vy%Jt!~3(?!DGIB7Zbrr+rf~!7SgmU}wkLbLHO_Un6|=ft(Io9mn}%I$wddX~X$6b*5YXBC>Go{O5u@J6 zc6kSdAWoP-F@7Kan$!2od}Em8w1Cp<<7bSb6=8UU-fF|?aQE0eh2h&tgbRzuns6Y3%rT;i^G<(oA80bawm zvqPHZ?WNq4LMFltx{etQA3nptb`I7E1fRA$?<#@Car@;LVBei>WY$>eDD`aomM`al z^;M@XjT^Wz^yL#ZH^_?t!G6;0z&jUQ=i4UtE%S$Bwx;zOR|p=j2Z1eSq6bcO=I%kr zMaNBWMDCk97blt!B%d!BlFO|9<>Afzl|bM`&J6Mw_{tWDi9;u{1;)q<=+xmWPj15> zxD_DzlUJ5pOC2-%8}fl3a1x#1t`=18oTrA=hc!>VLFS8@G(H9O50U7#YG`|G^`1Mt z1{@2K^Dh%m)5=&6$r6XlgSy)8`m7n(ekxlhk`cP>20sZ$-DB36i{l5%ily1`Zy%1F z2L?lx6U&!In0at-2UnznaQocv`|+mfB1{g!H-oBnW2Gq=idby?Q@a}lGE*&Zmmwjx z?)(j_v+HmdSBVdqc2pZ$btsOg0R*(a(j9um#~0UUZtVx&k$cwYhrdbYvMkPVPtU_C zd=eTr_-A*{$#}r*0BNY^cynh+UJ9K>PPy#MG`s%p)_^wDn5Je~`;~PX6SZtn>xy)Y z*>2zw;4AZje|+$dAa0haaAKb?ctxR)4x%^0!Z8sltyX zy~o(z+aXct>ktfOEO{3T8+-Vd_Xam)D%v%Mc{>WKyo>&^IEunn>n-vd8Rz0f^(BRMGH;{F;w>>>qKGh2EFQR`X97n>IZ5ZOx*H( zQg#B{4Qt;%tq$Sdad=T~``--2?0&sse}ecx_k{2C+U>X8O1=^a1PTs}+#!F45o4zX z&)njYM#c?O8FxA)D*?cws3DkTqF9WJl?)fLT!xhoyhyDYakeCC=InG#bO1WGI>w)K zt$nLTn}ACuSL_#T-l&eKtzm)#L*tyf9rv}@8i!sk03KU3&Pe3JaLTasc|n^q8NR&a z9EsJhtl%>?Ed}#=8LI^voILi%nS%)2fu0la4fa17(g@Sv)6r=6MU3gP9y917{BJ zS-|E+4dZRAng^fTZG&y|ZR2gjnm0NTn0rM5KPg%;@E`w=rhZWvn`;RU1Ox{O1Vr*b zY3cwo)BmJlx~O>rP)+fCYTT?nw^pj~^+AV&rp54VY6y)LDB4_{n*he9kzptcE?w51 z)}9(S#<+8-z|83eldr#TNp{6xu?<+DgQ5I)?NhNxLBD}PoREbDAo!fz#48Mg`JQL_lfP~J0fC|-2JPVA-@Xbk?i4%_ID=p^# zIz~l0<&zC~rYFh3%%}E_v;}hajmNQ3jx~d9;3j382#E)ca0xINNONSd+XpZ+gr=41 zRKhko9=FB+mNQt*_b0=YO}+&oG=wx%k5)vh%clx8jmJAmD;*WPk7>2lkjh0G3ZfK0 zo>x|C)Ic(2r7b2EQBRH}p`|5>v7Ck%Dd!U8A)#Gkv&K*k%H<+uf^jb3X7Be`5>Tp^ zzaVrD5^cG>=h}z4$;GpuOV2ZG_0cwv^6M`WQ5C{M79odQJ|YuIjsvr)T_C`XQZFwI z17Qm%2f_!_*cbz50pOJ0D$#V=H1 z@60BPt6ev)Rrn>P2raH=z@{atFKp>ql+KQznkPL%sbK4D1t5sRbW0G&_ zvI^G^G-l-@Auopb2~G#ACdavK~xK!zqqca)D4UjCDZAs!T2d;#L%; 
zHQ;9smqVulNy}5x_9q{S({#b8Cksl2CLPK*D4=t-gvA{w&0g9d|80`s@BhBury z^rs4D5=sNGqSAc*){w&i%Yx9Ya--iry0FTA|BhIeezEo02f8D8Js8ZB@lcxip|I6e_;FyTwK$PebD5U&u=U#1yyyj(3isc0(@!0)&|e$GQ=Z0u zy&RTW1K;O3fX4syE2qiuKKxVAWh(ilx==MTH{7v6$~v69|B}3UMsl%I*1tO4u}o~L zaWAe|Z8cbPoz>(#405-)Un{A(^3HIw^(BRQfZq{j=%=QhdHvGQ*NU+3}UmZ46_48PBpyS3YEfcxR$(_@#o20^QW zu6V@@V`S*&fzt~@71z_ixXFb-dJ4saXg{xD3v^u@Khvi*qRjDj9fc-_;m2ciW3!4h zDCn$D*o@#Um<5Z@M(cg@w=Pfi<%KoEox|!%?22*vHdqX)!Y+9f6LW5(J&Lpp z1uA-r6-YtuC>&)m(D1Wk=P;2DOe&W^vN~2WaBDv+AoDV-~>tD(ndh{!spql9RLMVq#7(8I@6_ z{6Ae=d9;>+d#reu`C5w>?$(lcT#A7^!R1I1kU}=B^YeA!(9hb6Sgov5LuS1rgqz6< z;OR}6;CQjozB9ko@*=+;@_x1IMArWcd55$e?zVg7PI(Qa_M;E-3I3nvCrtk9$15-( zpic-OAngC~qP8>tudslr+Kk}mOb(G(FcB)sRtb40+T?{pxsAts++b~| zb_pY?75olE4rBR&3O4$ZV$Ds^sV3ny`~Khy9^2oXR$axj;QpontYjdG0UAF0Q0uH{ z(`EJ<=SwOT>}QElOXS$0j%>4w^a>3*%5}VEu9++Byy&PRe6`i}|gUcq_{z9iQ;cAeUNT$({D>RuDeUaqQSA^{J~@g!X{4MM_O$EO7Qh zOqFxrV#uKPoFi0D3iV(a4&8|OuIP^iypJT$LUJGoX{6f!X^10xVgXQn*56c~NEO`g z37p1qmRL2W9u;5|PomktLDf6+8!4Tk6fU4j4LQ}s;LkYg*k4U{ucU~+37y5_B{e(B z`lC+3OGV ze@A~e810Vokl+l>eC5ziR`&#^tGwzF&rk;7KO(J7^(!s041YaMUL5(n5JmASS!Dv% zTcfDR_Dt#Wnz%yH76cju!Xtts0z$rB@8$uA0!9Eq0C$0~=bEkcjDcWZz}qMx+|f7z za@WV#+Qc(C1p@08o6Ui-a?OH>gy7q-Biq%CCKb*?_*T)0)rP4j3s|$?C4@Pj4L_^% zo&h;MrhKw$Dp#vuyd=a~0!2je+}Wgl*~$93C=nF=zIG^kbUX7oxs%kd_PP5SKx)7D z$xN%{a&i4BGspq#PM9eNc&|QS!M?`DpR~#Sm14jKg?kuQ=VpIs z4DR%_YiV}7e7VBn1Jbiucv*Q16fkELIhWqwf*qRgWZXY&xsmSOU`{X=+s^vmiMTUM zbwjsB_Vy=-(nt=tW&pU>aX6Fr4a1fWQ7gkurv{PL3r-gjuv14Xnay!8%aO^JoDVCR zkj+OH9-KE#yl;IP#5*n*ZyJQz*j#5TvQ5<^dZFm43G}Bu)^zSM4l=38_W?ASm<&)U zfq(YGZ-alKRKv>l*V~Iy!P-Gr^z%}^2|oP4c=!C6-|niQKtT5Y?n#3G>5jB9bv87% zG5POQm5qw71)dsO_d`QQmUBkdwN=Eg(^!t8Ix{zpHD^@1N85R_N&u&<519UDR#saB z?^;&QcA7NmUEVL{Q*Eju=U++z)L`#K-PF&L+I<)#BmrQmUTTAo!m};gylIPIowp9t zZ{4gX+fFChXKQVIKwh3k<<4b?YZO2R3A<8+67t|H@`u#rWo^=sq>_CWQlinguL{f6q%HXKorY%|I}*Af?3b;!#wvGWCe%|5<;I2YtL%V?m)e*>DjkWjEgRL1`KaVw zlA3OjFVW(vw?kGf=4gA3I;}DcKaPvvyXIbeSBKj5j7OjF>wG~<(Q#H1`e 
zA`A_(GO?f!1bVq4j2Q?`u|yKR$hTm0!T~~|p95gtaS9&-BE6RnYh_8GF3NG+m~YPH zXZYez`$_~Ic0EYw0|hR80g$ve6;TqRmTb#Wc4-+&S%-H!%@6{Fo8jJF37-dNJvl=G zcU3LBv=wgC`phzRY!fvE_^n-C37sDX*1HmnpZ6t{`kjrg4_i%9H!~yO+pl{ElwWl@ zAI2ruFUtb2U--W0wiouE?>TcLBQ-k{el0!Ngr%$JB8#Z}o;TU_e+Vd^Cz}{d|H-SZ zrYkU@vS3O3XaB@$+po9F5k%{i@EAopdiiAfEQv!_LX0Xb}%Kpc}Td3165H;@*l|FyUT z{f4ziU1`(%V|szN21{E5FxJy+@bTBTmi3Y1IR0Hv&x>Uo3Acx?nY`Rw`y>?aA9yGt zJ0Cx3!w3)FPG2M^m0s*GBywsJ-$*WRIeMs69C#d@VQ4UFW4B=`>XDELn|Azd-qd_V z#`^TS^N{1z{m}EX6xXhfxX3KmN3UW2QT}YgHpzsYffam~jj9p+<)HsBF!%^LjHW__ z6CFf>RyauM73L=xpCmOb8aEet%2h1O?8}#DVy&1{&x)aFA!OR&OG24{c?%yxA}mB1 z>CA|#PKjZWm#ZfLg7Aa@O~Qau!ujin0MssKsa>r>8QLR3R+;X zW9(JT5QPY^pqdSiTD$;x{g&8_LSXQe3>h=_N}xhca3R@9q;^3b6s$$4C0VK{eG!Jr z1U+d2@)TlvodsVRSk%~SVLbPKjue#y5czR1iL|L|nBrS1dj5g^vJ@Kv@R)Kgk`WvG z2Kp$k*e0Q(Sz!KJ8QHr?T->RSmHB$6gXl80Su_~?Fl(70x&`a6Y=4cn$W8K2VStsv z$t~Xf}x*OrHmZk4VFpP-%747b}BeFM47Aj zDH}p()C{F7PG%BAa3hnKyGR}r6w{D?o+LKQ@LN2o2~)7YnV{vG1vsqtIx6SCieHiJ zh{`voaNgBGW{6#&vYA~FS7~vc4?{_iBLkbtKjis{)z$gU_CN3;HBb5y)si5CdaTF) zJT-<9I`Z}LXoEbnJT`KC+u9rPj~_szkK?->*&O?o`wI{AyU zgHJ4qzfqVWGQ$KW5jCTMD$l>bb{i2UpC|>@10S(FyaABZ!w{HolG0Nj__D|WIM^l+Zf;B6(sDsErtbIGHee3u8t8L%{f}E0M zsH?75dWcbLM!Uo4hKx%O0X!~D!r}k#8N!6A21>>V`EC%*Kz^V5Jir|BH;ID$^FSe1 z*h%lF2osWW@bO=8XB{EF5d*idupSE`ST!<#yP@3;5=ad)F8@$j1F{sNb(%JkF+f&B z2okYWQiC*lQtF^Jf-N%_aWOA3j9FP4YEyudeLj4%P5#7irOVnY5TJEqE3B(F&MzwP zRE7ys2l|@}aA~vSd91d|1&V7fwt{^ee*xz^FdbHd5+SrKLpB!g7bm6em>Y=Kk|2wJ z7_1>)MDbP}K29)th-}9WgF{QA=f_&ppMm(2ZtRm0x|(SIL>^}{7nii-zcU$J)j_$bJ|xRnODBD~ zXo5$88iYqKpTqMDsIUKZM^nyzM}@hbkY_@k;fsyL~;!$!T}LkAW=$VOMzc-1QTLcm?<6B@|JKKosr|G zvU{e8x}7u=$Pl1SWzXQWme41VFEdL}uCrSsWX)wpD_rCKQ0(SBa$Sn!w9MJ(k?sO`tI8y_zJnz071-w;s}vjeL)gcx>ncj& zWz4a!C~FM-gLQ%7mF0gkp5+D^Q|DcYH=5n&zU3y*b|T0hxwKz*(eZBz;GctEPQtNp zfMGkys|}*4`^g|HM=3!V7DVj{!-C&>>choe*N7#TC?78>tLNn-tTV`}Ekc)RLzHPB z?_I$;4IjH*q9rHaDa2}a36Q;2vtpv}DR)O->i>j&XpVj))$ z{w}P6dTN#4ep2c08O?uGiV+9QPcmdMuzrRG6eFez1eu|B2#$?VN+yYVVseHbtfiUC 
zBLF2a?POnNU}D`KS7ralFQpo6s|kS(4W$Ci4Yjqc(Vyr9Gq6$-+7g(U{!BzmC{#&m z%l)(s!*8Bei1xJTz!7~*-sbN;zm&LC`+kee{(_dSI8zT@%OafRAc4p+=n zAxXs0eB+jIFQfqU*s^S^Ylc~-#gPJV2xkrWmiVHaCICd$6nX8z4eFc-s1~(T7ZWao zPBrjTz^xh$j2{vw)EOzZNb=|t?yMC$_!&uHbx#^@;C2dNDid>i|GH@@b7gD)xD#Bo zHA(yLMk}z+a20zlyX$lZ2V}RRJCZ78R}(GLvG~Huxz8*U(_d<7Ly7J{P2RuM@`e&U zfp`5bohm51E9%=R{Qz*Q$~2-Dp62%>Md3OK1-pcen$5l+wmVgg*13L-aDcIgcyCFY4r`Qffd zSR+L)h=M(+T3Nr}M5M96RIsCk4^U7SJJi&65aU4O!28Z{M<=IzU*DEyC`N5ACGFqccF)eF}&3m%$;>ZdpS{uf*4 z6eP;lMCrC|+qP}nw$0PFjnlSm+qQAqwyo)zw>$Aa?x=`*+EtOcR_3=Zxq1N07-1Ky zYLzpyEt#}`->AK4L-6j&9WOnqjL+TEr!1q66`FG;mn8WXJ(JBMKE;=3LlD=~D|vFZ z1C}S};_U@T8V}8mxxjH)|I|;lX-jOqpEwzx*4mz669VY`< z6|G0~xh#yYZD;9sec{qwplZjX1YYujGaqCDD5yA8?vG*GB7&$s0z==*V}-D6L?^OQRE(vltf zh4*7L+SnQAYrws6XS964?^ERNr+>Z2&;GT}($cB;m1~dkxn{r7VB%vv`@o6X>-|m- zdBRV>!{<%p{WUM2d&YG)6{(8YdC}U*=lgc}>Z876PxJ2EX=`L{{KZ_``y16#9@Tr{ zZG0E99VhSY>$BRgUV)Eu`R?!lU08nv-}1@&JP~m+w90mUuD^BkzNwAvgYMU6I}KX@ zyc+Z1#{0OJz4Ix<(|oj&fAekE^q%+_(dkh6k-gH@^^+urcgY*w+B@`fr!+qA_4UHj z<1vjNXI9wz(xaE-vG=#yufz8M^o{+s`S{|iw_9sjfUon@tiQ#W-s=IGH#gXtkDKv+ zV7Ad7yHPEh5--w``tZZg<2=^>^JOCS-^IEvz-?56qVGKb^1sUUVJLZ}o1kPWBp}mw7H@ zPS3lZW%}-gln06eo(mHuOaM{0=XY_ICt(pu{$jwE(0 zsmLeRCswEC?s88RIF=X+SmMRLqV&V@pXa08_?^8-Hc@3k#!;3_q`b4LPLgxHSh4S6{b zzcc*Q^#;Hn;O`T^g};ltjk`;FC4Z&-WZ?^t-&cO+^ajfvrr%G!Ke*bgyAT=ELb*#>`9k9UJ<%! 
z*jAJij@@C`@l=2*Zw4bTCy&->rL$$1QYof3AFAuytKq~{gt9ct%OIv`%&aA4zG%gE zGjV!Exiv;FK(@#TyFw#o3UT`i~L0O!} z$_v_UL1F`fD#rk{bMV$ih0@N}dpu-rm&SYqUglxleP zd=N%K)KZm&d0i--sR*0Q8cVd9rq~3!qLU$U{A9U^yj=cxtaw*i*@;S=iCl)XABX*^ z4M<@^x|k|#;wXwtI$8G}qPE5z0fwnOI)kQB*&M}I{BM4VitsX$ z6;qCS7_^OG2(0XuT0RM3SXG8HYa>y)8My~L2>HrDDvU6luq(C|UnR+s6DZPrB8BNG zr*=9uN|=nWWSR}3bBLDOQXVL(T%ja%r37+GRVI-BGhQ@ELqv*wXi&ZY$L=`FekhmD z6xE2jRj2?nib3lsW;K=S5>nB{oroGG+EQ&(LcbVXo3iFGi|VwhL@K%ru$H#nIdO$r z8N7FXlL1Ia z*CLB6ed-!a)RPys#{@p}o`BZg^gA?9CQ&9-n=oj_o zaQau4$JQ1#@6N?c>9v0?Yrwp`WnyR+vLMVVF*6>tvLGC^3>w|Az<3ldfm8KL3AfyY z+q6oF83jp1kIw%sP^k1QAxBlVwi2Q!PY*zC2&n`+8ureSzZvB`V=P`czr7bpOgZaK zZDQXs8faZrgRR24Axql@?A)AOV8r!;0grG2LU%_;oy9OAc4E|b*~4%zr`f3+iiBX^3M5FmrorWffp+E-mGybYNCy65 zQjm5uq{jEnoPQ1eV>68`5J4fNY1SNUNW0ifE-4m60{>Jwh{U-IS5YZrCNc;PAnfV6 zU#OO!F_6mWNy%l^aFU^qW7bkd(iX35x{-sQ`gSh-Ef1Cy z0g~KNkGmCM%<9VtQkmM-q~47smzMG-`HM42YC7o0RFPdKRboamrj`yk{Y8##gbnno9^MtWMDf}!3*C~MwE{*^ix&Rt9HRw3r(7Hnw)kNERQY;rO9Tb-}fP#AaCu8F_3tK$;PlXO%&f{{Q zl)#aI9RWH5cyN|EDZTijH_M zo<$BZJb)|5Jwg2C*q{L7DEwZb@*#A|Ei~>X-3fb)>nDxdp@{i7itic9rm6DbWa;iu ztaoDW(E8Pwq#kwYDGL`DiQQB}5|{UMLM@OM@30^Lo>yy<-kJyE)CsrSG@LxA>m!F- z)k9NxZJF#w;uBUWxo#}(5tk}2U>tPT*ErU^$aXH{I1CSjc4_NQz5OlZRT_=A_oiv; z6nD!zeB3>bmA&tAea5DK$IIb$4W~1Y*AV)~HU*u}-Ev19Sq|ssliL03X3kOX=WvYu z_13)0?V54W4XNMvQ*V20uix7=dp5@RSL1BY@yGnb&GMJ&bmwsz*2gq2x128Ty<|8WH|=I(8khjBF( zKWExExllddua2~+E`Dy$(>KNLcqW;8L$qNjwcdWmXZ<_28qVo_7vua_Tgg3tHhbKQ z#IOBcMmOQQy6-}fk849XwXc5|R^7LZ&U$X><;)u|?7JIyvN?PXZH?!T_M-60yUb6V z`8!`EjsI--uRdQt(m(!d_zYS}Ke$%+6@~xMe^Xl?GVgp(ZTTwn+OMc+ZNToX?!?l0 z;dMDkX|(=nKhf9?Qr?FDd7saJt~iUx-z`yIua{4L?-%`UOm;9k+Ox;;cp59&dVjfe*b_#zFv`d2a3!3~{m9F)keoKlxhL1MhfyTp+xZ9dDs;j3`f+Cp{Y(g7DPzf=1^O;5MWNctifWmAeJJb z-i)lYWHO8(l^sQ`T!F*1Wxr*SV(U(f0!QY8lhvYR8S%9cU#HxDv7sZ824q@?&q9AS z5R@44b0>8Tt}V}w5EhRr?eOzOc>8uuJo}@{c^lmYRx6xqo~Q|fR#3ML`zg=W0syw) z=~T!CawC{d7_z7UXCWZ~YD`4530OCxC&n%RcY)FeTsNjhbT#i{&VByr6s!q6rm*`I 
zt_glC;#!Db#Jv#G1-j!GaY2jN3Fkq{yUgts@`_F&U=jBCv61ZpDKDb=fp%7E&bOU%<{HN}W8`mmYBj%1 z!y!~}IbU~izihp1eO$Zmko<--OaO@9?Ldon)Lbqs(}*&nd=_Mt44KlFoWG@I&2A!F zvE|NCC`YN)4TV0rl*u z7^85i8JHj`sRWr73rr>b8(lzcDkZ1t)IpmqUEU%{nj|g~g;-5eol>kqOMMaBRRZuw zh!SN2g_igPOe!Pe62!%raMV9J!*HF@R9aA-)f7s`bOBORzOucc32Usl9DE zJ+q0$W|pN$*+iM^P%)fzu{^n;ekdoYF_JfZWlAv?eau3-Ofuwe(zu4+3ON0;b72+g zoC95X(E=ywxGqEbYZ(T2AshoipGFNs*1V)5lQtdQF`Zggu;GDIf3&`wo7oDiR$f8g zWP4E+sWF-J=P)T6u$!th8cLa;lyW0uLn1BAsH?2p>9dO1iecE16i;^Nu0EFfp-sOU zz)Ut2Niij(ET+F{WC;mwstPv>+VkHmB^`8P6j@RndC%HAS+Qs79+?5J!bA91k{uMJ z&`UtF3vYe-jUA&7us+t^kKRSmB0CB`(e>2<&fW6;S%8Rm_YC$|q*a;(H0rwWbObbv zsXwsuGs+8;W{l1iP_;HO;uaC9WqJt_p#|Hd))vv-KyZw5wag|GYzdiyFYcG-XjkS# z48o9xp)UwHasHt623Tu!_qgEca>oX!XsA3gwY|G5E(&Wi#+3V%hD3T|eTtEc@0yB3 zF5iLDX^26FV&`D1j!0}{yLQ-S|JDvzSA;L$UOfOuc}cS46I!JcT~O}%pgeZ){QS4g zj(G8&I_d;&`q&JEQ*Sv(*38fuL*v=Zg3)7e{SOaFd(=@1WR}3rIz_;b)V6g$3pji-^fRq_YSoJr-#*<`Ic;4nQMkA=FssPpKKvMxI@6+Yv+V9eKP9 zZVvKBh(n|N435$|AHb7)j*|JR#bIcci-Cjaa)=QO5aX>8$#6B&1shZ36EmDe(ivJB zG!A#voMBn#wl=Da?9f6UH`%0q56Zz?TvS7fd=D7_GpKE zVv56uGY#DaGNp>pI726h{uhON0GdVUiHI#N#tu0W@2Ld59M`nY!Aa8`oE>|D*~=+) znl0Ty5r7H1OM=W9rUj7667c(^#rYwPZUp&q_xKN5KsZiTVqMrdoY$)=hXwKQRcy^b zpysw+I2SaOHV=QX0OF;YUU()js0o2kVZSE;C&bvpwP5%4Tj zj1IC)Y-VYU?LH%_qp#x4;fI=WixvAizSAN*j6O@QI7yNFBPuao8&O+2v8PQ!s%fdY3ID4m&VDk~J0O_# zMi4JJ3>yG~{0rb65RV0BB282a$^x+J?j{Ty2&y?qtGcWLhew##CLpoqemamLQieJn zZQzHwQQO@P7zwN2f(aOGPlS^pi+s(9yJ1+$MnJ3D1pctn)HqTwHMC7`L1!{g#eV^sqAmNvh{J)10THl=KrzK! 
zf-(wxBk-I6GzVrGbw35Mi~#{sLDB!*?!>bPa^gj`Of>j=T!EL6ZVF^LM znB9M>!N`mJZ5Axu_{>@GSTjw;zUlFeKsMzXJb}h0QP?2S&q4{VTK}O{c7yn>ImktW zPeh-Ea&|Z+`<1@l#Ymq^@}|LwpuU6l1N2LXcl?K$G-0~v8wHFK6jHk=uLd#mjv$~}kI>5JDZncvs#v!C|+HoSIkY!u&ZwP!uG``L=O-M#sRoxkjD zmiW5i1AaNCTH8;{$JC+4e3@K58tt0vF5P}4M!$;VQDQe#UYuHchpX*JySRR+b9`xt z>tOt?mt*Hn?5a?j3I40CWdEL=cyfC)m|FkC{oow^_}}e&&5QfD@$?RJ+}HP(a2)^8 zR*H!GO#4X<<>ZU6qEAg%XZ=t1%w8C}UsHa;#p9?v@3broucJVxhsi~%_)g*V>o~Rh z1U~k{SH_CcTVBUm0w@0_36LUNpOA7%YPnL!G@AG8#FNaJ^#vm6sCYM3>+(AsBB$oD59|dcR zk*~dIvL6?jq{(?VET04ril+BB%)3)Hx_6k)ylLuIJO!+1xjChmQ@T5(S6Q86?IsZ> z4I~qZ*r^{#N0o1CsXffMopWwNS8;zVuB=y^Y}e+tQl$q*=v9p*OE}o z^^}Z4_H5KvS`fmRTs6WzmJm#d-9e4=7lo5~v!zW!lcW+mTmrC64CvB}aQ0YzCcTU* zKWd_36qU7$1SFt9ff&nqG2+qm9E@tu^iVH4E8K?>R5{~UhEtAH%*N5J9$+ORT9Ows zI!@b+TzikkYG7z!qy_a2g3{l_M5zVNg;gxe+6y^FY}k0SRlz3r>rz`yGI>PJjy%`x z>2>9sf5q|`S*%%B@?3$1nSQoe;pUk!nf-EsMB3eI|OXaq0*lQ zcOBKKV~CYY_g+-nLtP2QjD>xcp+4oYEk&dhm{LT(&!Jk-CMx2XDxhsev8V`S&Y3;p zx=?S#+6lWC;98Jxgt!&*o(euPc!FFB%jW@~o}5y$g!)G02!_oyo-%mieZh5y>j{z1 zO`fVgLUD!YiI2`Xs~oYDed_qP|@~^h>2YdPN z>Yu|?se4%BIHZ$o!a{^_#B`CsG07|)&6F^62FE|bCYo5>L=B`QobUt&5R~~mNCX04 zP#^=1S8nT|9h>0Ew2!)46m>q=LgUxI`akQn`X5(2TRZK{a+gaxC-b}~IbPc@SeDH@munQzI8P*NH1v$CSU?*Q#Yyn^`oiI7Aj_Ulms< zXF;%Mub5=Vg1K`od#1^uS}d!TPXiwPsYgwj@GlsbfMPUUETb@-n6WS&unhg&Vc2_4 z)N{_L7Be-X`6FG;glNt2WL1BNlGROXUZ7bnTc^0jqKM-@P^uWCRV`Z9=$!;Tyt0nL-^ae_tXAq?I)*zSh#q5k&1Ca+H$26f;!f#L6^SJ!)FbuuH8b zS}m)gfpT9_l^VHo!_#Z|Zh!l^ftiVOrKMfHqfpabYC8NeLr2P_S&d3<(`wqlT7N6u z-Kvpcqbvr!Vn(CBiJpgZ&!ZW~Y6Yq&FEYv8Lk;!9b(TqtdLs+CHZ`&uE-v+?gWxV& z@FwNpChot!(wGC6v!S3*HjlX+n$aEGL%wYfA=MpE$v(4 z-!giEY4%ksmzPHlMYtDmFj?1wuk#}^f`vM>d_btj+x&tNLMA|B)yT8k`H(W=4%nl! 
zDkeWjg{dVuc+M3QUc+x3%7(JUDkU}1X_hBqWjxb^lKUJKo7!vFokpNt&`*{!?)8Ew zzy9zT9Z4k_5ncE<6;+<8Acmc>(C%_LwH=imjo6{W>3NUv&6Qfr`~!X~?{4cAkMM!Q zv_-`>At|;Ujegp|sDN(@4J~w8G(^e`f!sSrXz6ZlRNO>ijEZO8{j=~ehefSPOX^mo+sM|=R-P?JWSpdS4?_j5qSnwI z5#G!p8nb}tT6az(os*7ly}%qxBQYa4$9mxns5`FA4$!s;J~QA}9+HP9wY+b#sF|GQ zVJqO2;o@wC9x9V5KoAy%2to4+ z!>b7W$B*v&2mMDIWqNsoOC2MUag`}J!yzWGzxpwxLZj48tHTJAmG0doK|z<=1SA*> zB^wC|X+cyW2Gmj-*-^KQtR*NbNuz#%AF!eMiCy;uaUup> zu@Ti&*U&0K#B_RH!8$nx#8F-&T#fuuT{ikMN6F=q4?g1u`J^EM#@wum1v?r>Si^bH zx0gZGFqjyCAb8|FPyhtTaV>yLiOvysFqDaWp^ao2 zFs6>JeciR(n}#D1E6S=$;l^N0mvls?SOIMsj8|%#fXNv!SfB|sB-r}Jkn&i~p-36c z{;C54okJ$%U}`Yb7LLKxGhF4MdIOrA<**5%ExJ&Z1nF=Ut+X#8+jg403uF28f}=Mt zQl^F|2N2=E&bx)nX!UNE5`pxpYe>@&huX=F;uPkh1+HUQF6&f9HQ|{= zX-Jm3k0%Hz&w@P|P_$16u5{iWtdghdkPNTH1xJgWmtA?gUd7_hZYIUqGbfh^l=B5J z;mt#CSsJjBjgoE*3CLe16c+X`$Y{;=+au$Yof`dw7K`6h-DGaXMrE3Tsqr z;(Q$F84EWabe#}Ab@4ux+$=#gC2;3YgfYWiTBO4CdluXmFcl5qb#|$y4sV z*AbJR-c-N&c>m;QL(I<0KW>xZB^}rRV;cQSrNX8O5nmw zwGA0(Yp5`f9pP74154F~oEq-4x!l8ElBL?2W3{=H4Sd1!ZX96S;}RU$Rg0lj)Tjm5 zJrc~MTr%h#Hr?zyR6^mHA8bv_l<6t9*0`-2{(#9*)@U(aE5O)YDgKZlUNzU+4B3c_ zqRX6)x38IONg_!yFOZX0=}5*XahyK>WD9nf{y^kwPAeF?uo zwayO%`q3m$I~I5`1Bxk0&?bZjoUV zntDbQs-g+aTPvPJ^B45TGx^gr56fX4mZJ_;8r%1~e1OhvJ*H15!lYB=v}6@vZql!; zPFmLuxdp7=RTI8fXUE^KP#!+brz7S3I5jijFD_t!(n&+6p}9<>$*7Re-{*PRP8{e@ zoxKL!T^Cdi+pA;s3bw_1K+qbcwZC|fiIo6}wsV(MvnZ*sg>fdYY^;^%X6*P2gXp`K z8`1Z!c+1G^?omt2Hf;D-DD$fdLoLOeT8Yyg;UlKRdac+%1Q*k8secKqAlfRKy~AdG z!PkJ98qKIu54n}FmA%DNAx>Ud_mFz!3|CEFo7-*d%0L$KZwYcpGigtc*r_i!SauSE zxvfSkI%kvHE8lh`%O*PnsD*(N~A=a+4;!Kl6ApJ4c?+5EmQ`!mmuV^ zZVxf;L1Asl7uVJujOR^JZ@ch##F_pDKLM6F-!}~GPAalCWVCF^__a!PLQ#LPCT^f@^mEif&StpD zs6%8Y9g8kcW-CIJfUz7mKvHx8cXF3HmmR0mf_I-NyXf3%3n(~hW?MK$*_|DCDnTKx z*-+jetyF^vYj6GCL$2dwUmB!thka$|3Jv>k3i$5H5<3!1OAebnU2;+`ntNL`f@oj% zuQ++UrSewDJqzlog(}@&&N6$@HgH}SrsGvP`8=QtM`cMoNWKhbXsSvP6>D+6*!adL(%TuvZuoYc zo^m7hc<^5C=AMcElQNL8R`?1|m`@sJ`zMHO4QzlZ{kZiVa2s~JXc#Yb3yxN-zOxm0 zF$NQblA#OOX|_~x)C|nbv1rm4Y0q#b)cwW;0P)d>t`qu%T4P&mv29unAp$ayGBRWM 
zWwEXqe{hANmzQbaMq%x$bR?9UA<(1pE05gg+GTxf*B#n!Oc&F))=&7lh|k$PweB^! zAD+VI{83l1kg;g7tav*}fTsRc5Xl392>H`)>dyRA_N5#FL)Eg4j& z_*5{QMbHp>j4(ElQ&UV=`2~^a(+6G1R_3vLg_9l^f%!@hf~1z;xao_~g)YR0k8^n- zp2&yX#4%$&Qjwt$RtmDrwwXoI8v!F{ctagT4)1qH>%IKjBmd6|k2Xv?0Iqo6+cijmxbbe}b!7W4@Y9%EQvdDAo|ijG0sOEJfb6N9 zPiqz6*VS8uzOs}Ut9gwkV5QwQZM#5NqIovtzBXVU`VzuZ|5r9G5%O zxgqd5%4>}r4I7=Gvxe(ev~dR@TV*G@ZW!Knz`5uoxr1ps@vrnmck*qQ5YKj_+fkU` z8TLbvoVHoFr7_4XwppiGaNM884oST@bPwt%&q!lVY*gAN?35o3;ACEJl~7*8z;p_0 zmElOXy(!1!D!Gr9bY37#H~v$#<6_}2nP1x&w-hzU8+PBWQwG*eo(6pbeJ%LJ<XA*0f(#bDJ8S` zY@G^EeNmSNdg>8Rp3CBW1$Ik7L^I3&lUCz9`nv|qqJc>K84W(fc;t+OwA!$z6)6O;s<72bWTUGoqy z+!1mGJ@z%|$==&hL^Rbv5W%@$ng>IO7CyDOI1@Kc$8_HHQAswB{E*A=iwgEeCx+DE zgGjMAe|;(1$NY}XSypam@grZ&hg9N3eI!py=h?nf@zplmk$$*AYISFn3)~dO!1%5n zY@j&gpcz$4<&Al0)%^77FuR$g$KDZt@sc;(8Fit`{>F=8yhmL()z80%<-0i`*lNLV zyTq*X#b`bc!~7;jeW1gl_rZ*Bgn0A6p((}o;qp)4=4LXE4)w`|L7R%cYd-(KGrG!Xn43))9d;m@AgH@G*_xU)-P{%wCm{b^M~w3 zzRTaYs!o3KQ=Q~wM>hrdXA~h^-~82H1%$P-#6(5;_%T)DJ(2MAL2wR#oAGpvzW5$_ zR0gnD@?i&Qb7$t>XXfGSMyCa^A$A~T$NUw)#OKE6F8z&G(ib9Q{2AWzo7?CEpUw*p z2X8d(F_-s~S*9qrd@xPIIZYz$U8QoV6wc`VlLj606hZkBY%v&$nzCDzk}&j^?g8j} z7A%2n97k#dN7OW)9&B=!#qjWnjkz|XXMff!((k%q7~um1<;OVGFW*U4z*%=X?8B88 zUoLUxzVJpSyI102R`Mk2`hw$wWae8Po*(kBywqPt5o|MjmW$sV_A!7}hS!onk49J{ z`96H?BT^5<#dR^9FZ;AFpaalo{2`HtQWO~cs>M=mti1yh zmXSA|-bp>MSy^W9PpJO7paPL3g6^VjD4p6#y}{YzAM{`K@8`!y&4WW#8-LrpPct~F zoW((EkqAdQG&!lB7+o?b^K^F%^27lLlMZofzoLl9*rH2x!Jh*5+{(q-o+w0#5;GBI z>z}-j_3FKA&iCu{L)WbDYUj?AcJI!H^-VJei)t({ZJ1-?jFQfn+GRd%3FWiQ-q;02 zPeN|tnyg`$VFLuXR|VW?#9mRxzMR7|;R_`vTa>YkGNB4(^mXl*^r_4+5ZrVyv-sb_ zlEH*r>4UT>NyB<)jAAF7P}HzcpteGBme^P=xPvAf>gePp6;5Xv4wsEXPD&XyxWece z#b~n*W?W4!xqIjAQ#dY_Wt2qU*z|hD9pqn9Ci} zB|T`0tTdw^>cunyjT7IQsy2z%B!}x$Cu|G!nDA>E~df(fRC5rpn|;(ZZgC1>P0 zR>sqeD1X$-Um=HH+%uxTNOtH@ox_%NuHq|>*t3|+OC>#A{Kk?VO^6Q-**oB$9_fS=MUrb_)84HL-xi?VGLhRiMX>0=L!^?AjEVGh|>u0 z@!`ND^xVFx?J_ys-a9f|uE8I^S?kaR=|^^F+wH#Vi~POS=7YguQ^dh>@$mTi*7lbfyOHZQjB)zuY6GScyfZ#UiSHc42E$I%GtZ)P%I%9ehU 
zp11kTNg8_4bzH$_h!3-IkXp%Es#?sI($ez8RZLw>bz$<-@%ja{k}~m;2e}Ff-=@oK zxysBjZc#V)5|J^|PG@CAkG|pG6^$>V^os4r-T5R(b~LeA_tkY#4> zSzgswk5SXwO%GS>O$vJI%nv#i99YkuoOhaQ)@9kP_k2yYZnvsFfEJYo2Q~7*%cllgLFP8S#2&UT)R8TTzcB%`r zG!;g}oUu+QJEHJ>RykfRPlD^q=QBhBG19Dq)SC@DtI@UE8P+ds59C2d{P5)L*-Krn z7l(5;jl$xoC-+E|ruzIC6tbCY_7g31A$cApakJxFU3mKVcy}R+ZQCFG(0pn#9W@Gn z2>Z>?)^1C=Cq&9 zwB%+!#+Pc(1qWhF3vcuFu%i5#8ga*fg3ErK)`m4jI$DiN7w3Mjm+PchAMNM1e+K$i z;fWXSRTGvix({O3Rw|2Y2CqtTshXZSa?v&Ju39If7wYD5FYJE4o@!*vB_-kYmO($` zsAdUfVW(#FnsCzB)t7{iB8@;>EF#?2DLgPgMau5?S8G)>uU-#<>*7oz#LzXf9f&(f zsAl~gV=_6-eqVlICAy2`8~0wh8Ij6Hy1(BgbR4;|E4&#AV_lXMW^;=hMn zWm$5$w=nsqoQW!nry}*3*DfvZX-9*h}se zQR(_s=Q}ZSv@>nIg|BImi8J~c+&|TCg}F{gj$!qS!wZ=ycKTh~Q~cyxHfy|{b%w33 zd+pcj56jT{o@S;nD>C>rV}8q$acLl!xqn%&)4AiCl~z!i-CbUI z;P5R~+6w0vdMB&a7Z>fJbX$g34_y)jr*0=2Z?HwHWD4g~G|yg;-0f^V96jM(g+HdC zZTDOgBCeKmO+PPqd`c?f#y%_B??-w%+BEU$LfbuMrdgi$EyFMLP;%SW2&i|u z+sj5))m6$`uzcU%JYUI)33h$qlZqY9>R)%b`&b+%9v>n7?Hv8e*5STpP({(QvoJ#xZq3CQkg$KQB0Ra{GEh@o#_RlD#^b$U%cha6;0J{B|lBoZeq&YEPF zh)-iDn)8f&7D|sJ7C(^u7#mRyac5QvFRgZnfVT$XI;~c~VK6HyH^G-21bk||lX)gA zs1D~Msy^=Ua6=>VAZ?#*e7#NfKBP_7>HZ%x{;FtBraAJ?v+Mo8@d4w6lN-!5`TnfS z%Gk}rw!?K@%!?#O30;qL5uur_K$CkEamWHNgeF?Vfm&F?B!yfX#^B0~_$F;Ij^?|KcA=a_I9*rNzziJ&S9B_sj_aAcRB zTmcCx6hQ_;a)kFV5wKfe2n0%)U2v%2Pk#a|Pc#I9vCW#}j`fm|Hws^$B~1 zO=Bsx1@=vE*2mhf`w+>V41~s329^d=RxS|)$@nBuE0H?jaWl%hjDI$G+G|Zk!ik2z zKMjl`4C5)1?I|Gl!J&gQVXj0K2M`eH2?i99u*@iqKkkM?5siNAr`TmkQeNMqga$zf zz3Tz>aRSv&n0g`ZwFz_-rY-1R5O-?*!tKrFBcX1AzYUNffcga>LBg)Hv6Z~!;#+Hj!HggxZZEro@~ z!^Y%OvV^J40nvrH&Eatgh>wL(=Rv!KQ0J5Dz;@@sKZalv!iV_&XrEM;G$bSIvOvDZp+4)+~Vh2V|jNuMSis zzybl-NRTNU_>KV326R&ZgYoO9izM*>53XliS97m|001b10RW);vx2RNt$~*%C{-`iDYEoLpmaSuHL)N!_Hx7PWWd%8nJz&6A-B zAM_bHEea$}#J~u&ko&?cibU3O;|-Q-;1)Qp4u+9AvVJX=H3JAy0>*Cx8XFr4Y#Xng z2U1yX*~Z6DpPiSlt{1+So*S>1t!8^35rpdn2N#Q31;t6^;ObC<@E%WH$q7#*wbe*1 zC%Pm`5+yBPwoGN0u;R9f@OC3MbqBKsE3O1U>;97IIZ+?Edh6RuehvC{tyfINKFu4x3%rl2jKCY3;0e^T*05 z!SERKr_rGQRj?){kwe@3Lg&q7N(<1AT}TsV&dYQtY-m^4Q*zylf2Ub#e%CyuMAsGa 
z%dnREKrK%IVxw9Nl^89nX5RcY4-M=IQt$DhL4^YpcEvIw*ccUX>&mtaNW&4!NCnrx z^Y{@IzcC3m1$ao9;-@!>;)RO+Nh+iw5xY=?lLN!tLeK}>L@BzrAV?lOVqCqP)xoCxP8F_B>i7Kld< zZObGwkI~G0fep|p6B*K40Ue7}rWNYzaT16qjX{#}npB3j)p3C^I%G`3+8-C?-seKl zWJx+6k6g}?K{1z-g~Uxiq>t{tq=LJ{P&9uONRHgKZhCCjzplHw5wx20L^|1I0ZDw z(jjl*>G#UfeTZf-Ma|kUrO6^x!3MIzIZS^d+(5%SWqNN4h`8%uJtGni@(sa%fM^vE zEkKBrCsOpHY+EwKH1)BD>p;7ptOC&v+pcgzG+{!DEXBPX64k9;B9Kr5NhJZ(=5uV2 zz8cNMVxT?&4#2yeE>-I4AhXg9r~HZ2kE9L!&FG;y@M~Hae!E>DqE}VKTT4I->&v5? zffvJ4N(Q)O?&m}P7uio*xrpJ1`A;n7rf(`^0<@sOmnyoG+im$IpI0>=g)7T)n0BU# z6G8xLK@LyjU=e{l{TQM(BFr}=u({7z_=TCzz#Ti<*C2RQjlh4DO(JsCEyyx#0L;Lh z8?23Pf;Nv3YK9yKu4FI(W0s&bWMjltd4NImb`a&>)p6^y+^%NvB-jT%pGPIR#exo4 zHfbL%uPx|;B%Rj3WJIzAxgroALE$>m;Yualg#ECO1Q-kuM`(_PH!+ndygU3ZQKrrY z)Qrdu##zvykJcpFfDi_{8(1gJiCCuH+w;PnYwC5a`dNWvAjDxMydL5j(cuPr_kH!> zU1#2NN9UWB#?3bV9?_alC1HRUej7ZDLr}7|2wsN{Q}0{gzGJEIpg}j*Xyrh+1w;1h zQ3%-_T|(TOu}c-zgs=%B5u8|CHH!Qd{WP(YT4?KMogTCjxR6$w=VT9Lkz+QUMw%-3iVwlJYfm*A-idAN@cGj@d4A4PhSPdFaX${X!Iu_lkpIdjE%ik6zOP7Ge2#Sdc@m@M`8q53Ci}U9 zEfDMec&ug(|&ln zDa`raUt}&g*~s~Pt$*zwgI1p6=kUF)KE0^pcLtwn&h{>|$iHf;$>RJ>?ztZOHiQk~ z_6ixU=KZu)efey7uKDg9emkD{$kE5j;ODkqdy3QREqd(BY+dX`^8L*6 z<$c_I8HwKP*6O++dcdxp66LZX`4&h~7c3(C`^W2b?PFlHme?uqvLcNs9Az2zDenIZk8KrSB z2BfUHJE>Zxg2}E4_GTHbs$+U49Z1C6TCRHp4BDg8t*_3x=dKDDZz2uKok?V$vxCsQ>W(|traGm<&^8 z6~i07h)W*NOy0ij0lL~9y}xxYbuSI@b?^-*s}OUazGGki!zq6PkxEL+f4%D38~(>Pr1txVUBb-%jd8lsX0osE5cnq$d>Jgp_8a9v7YiOZ z77-hZ4jHq$a8MsJW-`{Dnnf_1A15ZFNegwJrlP52<~Z5k@u#e)sJpZg zkH-V0g7-?k;_GHIsxo_l_tW#I?d8`UZ~x=TK2YwL2QZKr)#%*(++?xFGN7hofbpx~ zmt@HgZW*MxrB9>9n<_sGHCb9bM3ZvKRBRp$H=rbqN0i<)V=4+HZ#1IPEi4{5FJ(=` zoYELcRGMxkMVmfu^a_KFG-fUs14)rrAExQ;?B= z>x@@0z!DfZv+7*Mv6G!ThCu-FAbdoHwx+_c<8{|NipkAI@Ga44nRv0V& zf&!wPGYY0Cau>-M4U*t!3uOxyuN3Z!bK8_7l9LbhI=2UxfWNc?!RTO{7L1ZKSaXU_ zWY3VWrpJmZybsT!Ay-&4BROJmlv`Ayk@m9)D?V&0BTp>htLjCqjVdP6;EEqGQIU&N zD8=9iaflD=+;StKJepG@v89n_j?=F%VKdf*gGxFfO1m1`WsR5663M6h$Kj-VhG&1m)d|&WkpRx+(b$LUzU!w~$`A^9&Wo 
zOET~IeK`>}RRJQ0cORjAhj+pNqZ$c^$oNuKak8gB0`|VlUMFx(GBk@6}79$KOfgx+;l7hV;>e z(I83lDIDq6#st;49E1qy*3J_-TP~S0fs3WZ=zvnx4tGiA>gbKAoodIzsnM z2hN>{lhxVu6^V7HS^Yw-aSp5wKw1V$1{Q98{$%dw;c$s_S9IU#2g7?se(lsq%aJz<%9OBkUEvoJSs@GsbFm# zdo@8Lj@{lQUl3L~Fm6=@voDj3kms}lgg~I`VueUhh~y>@PJpI((59a5i5Q9YWc>Gd zAm9zs#EO-3nNq0<$&e}`28}Xn3M3)cVcnI|vMyYU7#q*5ji>_VCmLNh?^b$sU;_L{M+d@ifO%jphnH-p7GP_-f;noO1Cs}4 z1w{35FD2nzg_$%}*u(T*2O=P3u*=Y5#Wdq~1MN&_Y2MRI}gN_>EX3+kywqXA`f z=$;lW6l)25xGo0;zKlr{LOVKUquPI&NgAt=;XaiVCzcV4L)aY`!15f1u%N(POopVm zXf-M-h{DB*v}+7uhxrJ-2`d9YikIQM2!NN-d*Bdedz{YM?Ot4YPaXW4M5BSYubD|2 zDU8$3c`DTsjnx7Xs|TIdGy^+6?XqUAg?=Tl-3UrY-*|6{HpSUDF(H~X8EV8#^pK}4 zHbLlb!eB)l(hdzNdyp?oO~82?fY!R<5dq=@fN;g70lAAhdxv|hFZuhy-f=@qtMP~A zsx(DJCh{cKSt)ARDCn(h7T^Hdt{N#fN7fnotP`4ZT0?DjItB(bQF>p#O>JP3i`tp_ zVMiWyJ7--WVlE>DrII@tw}egvskSqIsDb- zrb2emJ`i{m4-BZgufYL^%D4!FtmyS#MRu!Ck2PEpZj8flQXkF;w%!>4nqQX&Huf9h zsJGF%a3sgoATuFW+!f#S7~?>Cd-SKAYl-9UfgiJFg#b4It^22kzzqSd0Of$H?fQ?S z>>4Od_U?$Fp_C82(0{#u&fs|A%_0_5Nf)BN6rdsX`%1v73)F*l2%)PQfL9Hb|5Um`PVk-kn_)|rW3u?w{@YQN;M>@6B7dm1R4j%lYYYB3fQu+Fbh?$V+> zVTLN!d=V}hI-u01gbj@gP2SrA9!8xtj_|XKr3P=jC(eO+b62LvFE69IrsM*RYezQ6 zlOo6OM#jQuC#HY#$lCQrtHWeBpJdbHArjr@)C=D8xrg_xUiELo($?JU+bFc>6@Io6 zUiqua%!Q2Rjs?bp%cA$P()(FVOLvoi&Eyo9?_EG|2CSKCt8%GZv+v7C3B6Z+=h2q# z)6`GOM}G#XnCA-HkFb|Jx5w4(KL56B^Dmyu)5Z6n&(D5~tz#mkF3U!Zro+>xEqg_r-(dO3WkO>Vx~r2>Gxjb+`M;6)-yR zj}{Jp%TEcoDA)Dt+DP|a-q?cY^i4*?{`1WQKZob9ncRWN{$mc6+X0(8WQ+Td_(JyE z{e*kQnrf@}Qk*90#)y`ll+qAi>?xL0~IZ~VFsbp*ArQ1W; zXNmPW&b!^Cpy+!4w9ho9C`H-poXa8RL(lf=j=-b!`Cw$bqwJ&6@FoV&`e%F29Dm9K zi7|N*{U!OkInMKLHQ3S8rBe&J%|_Q-SBTs9w8$36_j6}v>uvW9zNhtZB0!mZ`*h+f z&+x9;?P^DtdppKq0>72|Zu-say+I6A|8zd>$EUsYY5FC{ zF5C0Sc11Po>lSk!WDVmiNGj4D4B9i#?niau4RGC)<|?OOM@`Uqeq*XJNqmM~9?wA( zt5kLvU}@~M#EofLE-OhRBXDki9@gkJR~CD7Ghrh62{^>TBn$OxB` z(aBWOX*HTx5c8Jt#t32JgbM}sjICtd9?>@H|oBP-6@W=sl%~A z+UeWj4Ra3uI9aj9D-gNNtl3>}yUenn(wi|Y#<)9UgRAVNwMh`^#?Jpm0aP$BH~kb$kQ3b5g{v#>(4<&~usiSAh@U7>JV zY~|!gA4nWEc5y#5irPjQsCS@@sL>WG3p^ow({Lf@C}%<@)1`QG<4O*wwob%4(P9y~ 
zLMH!6miskG*5XL!r|&u|jXO)jaW|%fTUVo+so0duPF<;!&*6$n{+i)W#h*7h@EX62 zv}H3LV(dj|_x@W3$D@Q??>;U&*ynu>t&lIZ{$DZ{VJ8Uv5Ns=A%^;IbM93{yC!Xyf z@vDUwEZhNx?5=%gKWF8hwHx<#82KIS@7OcoW&dUu`)=yB;}y+|dOOB;0Q^?_HRRQ% z^1$=%>lMNWfZspnAm^aq719eRXRxRD??K6}cjWEUZrTsvfA=U6Vw=wC|918mzn%TB zRq6lj>>Z4)be)~dt^R|HcUJSXLmt8GS;(HPz70x{6$OLA;WNamKVn8|HU{RCN)7@H z03Yob9Gzc7OLi9Gx)V=;p`9(3z%~;~Z!3mRryL`}EM|d~Fqt2do0nTBVP-+T7fX;3 z2RBJSjFC&9d`x@ZSinl0Tx8zax_?R4nLL}^xzX98%r6AjPE4YjU>13`GweYH z@g(NVL@7^^Q@uG!)Jmcb@Mce^z`8DDR7l5`_jGdl{lh913pGkJ$?lGFTEmz^9hOnr z))nm~7>ZY)JQ-JUSJ`GMp%U>MzBQT@YKw2tI!@%~HA$`ncz7Vi$73tYKrD_Js<$-} z`iewbSSH!dm@s8NQ*gmUMU@)P&Qzz=eGyhJtZ}#~R|kl^+YC%umpPoObMkT7mNv!g zi1u?La?ed{G$2|E+7zvlWVCI~n*8;3f8S{ldLk#OpCNhsah0LegWX0|zJM(x;882` zKe4tCIgL?pD7a!;7z=KDMk=h?slQpA7oUmWP3ozvQ5``c+Zei%W}!b9OMwy^n!ws~ z@q)h9`+E{BnBRIP)YYWY8HtrmWUAZ6WV~5zL&r*qGUiNU_>h&b8gpgyrrn=rJLo?* zrd1tQL@_1#A_QG<$FSF4BpC>K!@WCDDMc1n!+QZRA^nL=~4rt+`HEkb(zp(LOxN z+n9rBojpy-QKX4Hw`AFdhTErWsuaQQmR}=58StG5fPs82+4-6B zc2~w%o!zRue<3GAdcqw#a-6O2H%1Rb=flBrOBt`1CYxnrWy1_-782dWQx7{HSP9lwf$ywO`) zwOk4-ps?fgu!kCbkd%wuUH2nofWek?;AXsZC#mOmuxPIrR9+byrCCv+(kr%L5s>O6 zKd_rTy0e_DRTX7eJRl6j)+=CP$FRLT_osiFfe=t-W4NOHZ3w`#lC5V|+6Olm#qZMz#V_n~5yOK3s3}Y+$2!M@8!^b9L-s z$>Th-vO{k!AE5&oP8`p5o;kk#97wf)Ep`>Q+G;b1lLOmrQyaNgK0?r>F`PG^43^44 zQrxfN8kXQJsI{@$(h4WQ!%2vD*(Jiewrx>dTC3z4fKo1}kZQsMWXd0Q=D?ZlI_+w> zKBn5Fh9!St3`rSR35k+JJs}KhtGl!n?EVjeRLdP!=v>uZYme;0E;q2JSc-B% zuM~YH@bHr9Or@oA((Aar;(o|b`dw3i{!O#Jf4HnzZ5jdtc(&z^`p0bxzYZZ#HhT|b z*zf=(@Copu!!iKxOw`K~Sim%L*AXd%6=sFS&7lD{fdIs+fu+m+H9(8Giq>e(Bo$oB zBntb}&-Azew7FwOG<(N90-M!~t{%ZhW}OhXKd~^T!G#2XK4|yB(m@A-`)=UjfZP5- z#HQTY0;LaJ8ohS}oh<@S8FMKu%NcgW-l67_O3RK=9sKI;tu}!gBhTb~N<~YmM9H-R z8nU8P0Y%g{mjrNW=*t&|UKiPhKNsxbCvv7&YT1``RS$e|8;TBmxMZ&jSPy+Bd`7B_ z+p*VHDVbx3FDZ&u*or%ktfY6O(rBl8!tTtY&=~o=*_pn9M4fnIqTE$p+#~Vz498y> zy_~d04tXY4hV0tGsDrL(z>=Qouwa-u=|3mr*p6t{nQQ4jnLm6Q4UOE0&trpq9gQq^ zj*IzE@Y-)S1MqgnXVR~emoF1ke{bmgk8Ts!A%FR9=M8l)ea@#(e(E1rKJd@pAE%DU 
zbvT-zF5--j>22I!I`(XJ>%CrJw7qT@{dKqO?-Q=pEvr;@IG^w999LXUdYDiCqWPRJ zL!WG~dD^X<-9NtN;Myo1+YT?_?|9#ck(2+3UzhvVtoT|C-8~QE<6*tOe%SVSbbkzj zRvg)$RBykiBjR%3t}IAhdk}9j@NjTGueXugRPcX2mO0E2OZmKb5NtAh+(h`e^KGxY z-|ne`<89aNt+OwOfJ@fkaJ#f!Ws}FKRTH~<-79^(=|k&&-!2Ty{PoCQy?bvFvVG+J zI2P;rj5q_|9{v7a{=wsFIK1cI#P_+;IBZVBZphj4v2HcK?F#z%`O0beF%RzHD74F% zgsC!5TxxGF?y|}RIX5v}SIn}KS$@`3340#yCq@Eyb4dzJjAB@Ucb9=N@t@gI7 zx`B6<=2i+zc%AQi&n_iJ1UMEQ#XvA_%J8r<&Ij2SXh-Puu_r4Gz^bu`kTHW+QDir-IrAB z-Mguo>$>XD!K!WLYSVUgq)_}t{X+mTkmZYx;#( zA5VM;e#mH$FftZ1_IK3&9>hJKM<$zaCO+ZN-66eGK!>Czw&9S+J*-oNF5Y#t(COuHWw5$=25DzlQHl>d^m} z9f0?X#L##qDTu^5u_krM4!2%YK;n;QsH35tUI@z~m-uA|Y=l3BZ}=2 zW|0c~D~9&AX47`@3yKel5!2M83~HvK!A~sSsGo(Um0NYu|6yisUV60bzA>53^t^66 z`8=_|QLEByS#j9>RBQ0vI(A+&$~=H-vBO~}DNMSS<4RRASTkZ8_DBkhhciwpL%!6b z!;UhRG#Ha3!H^~M$nO%WiH9`-WA3w}Lsg_1o}ZUyO9a!Ez*m_E9s3p{&5|vS@DQ3y zMWUQrGB1bvvo+@A0yT&nC&z`fco^>Nr&gabQcIFYjD|%6GmIx3BgKe10NjRu}!5gIktpfe@*TD=NslPK~D|sf#Sq#k>q7BSM?U#|uSy4m7|lTC@u) zWFESuFM>o*nB9}3DM>s(e-hUW+rpM|2{8~!v*JoWr5z{xND?k)sP47Nc=l!ZdxSzO zBi@3Rv#&*z-wQ$7PoC(fKrwY_pl&@Lyp@m0k$a>@ku-iQ5H%9P8Sq%^kE|3OYppnp zxS}Za0N!wH#Z{t)5ZD?&nq24@b~h?LC6yvA37KLcFGcorC|_&)U5RirGeP^%XT+jU z_z{bu!jUdTJxH&>q6rlzpg$!2K!bO_QY`sY$TZIpA5p^6B_a={>Z%V_YFvUaDhN`` zoeoh1UIlzcfkV!!m_#hek&*>~h?#~(6Zx``Z4P=`L`y)a3`H#ITAdO#vc)C~0-j$d zjuE7UB1`QiMoI{h9(KUYz>OA$O4#Aq;C<{T$0d(4Wp*8`C@9H80Yxq1kqbR9sSX!i zq5u(q1WBg2h!DG}2)SA{yQQmmi8gm8eCeM^#|g5vrT~1jEvh;G1BaGH|0{lx34}w9LZD4HvxtNiJ5J%|nbv2F6qw z(PwL!VKVeb9BtUlM`(A6QWPh^+Dwooz3F{8Cj<)B^=W8#{RaAA_!#N4E>J)XPJWp8 z4E#|E(pJUeCVy%M^bHz{8;JutnRfE)Id5?}{b3Z=v?AQf1XCy24FE{y=rqy)@y-x+ z6Ftaz%6i&&{;{ivf_4=N1PAGMmiYG~1==Y}53BS}s#B5zdYN0K62h$K3_9skGPp6x z)=`WsE?Blg*>cNoXN7eI;NlfwMJ%78+N|&oO>I-!0>NZpp3^;y zVlu9BpGrSk(ZxggxY2Hs?qN2D=DD{|NPTtD#-M?4M6=6b*PjHP!}!99qSDAlCAf_f zt2?Dv9pbe&wW3}mlqaL4en`MJXk(B{a?2(I(^*L5G|}h+DwN(YYr*;RY>Ea-Htj`E zc4Hu#CtyKJ-=z)54HUB$=AQX>7IdmB)5=wW@F6KM8rq*35|=r3_Qcm zR_ud(Md&$DlCoa|+Sp+wI@OzVCkPIjCh}} zsEPTVnby~*M`i9IV6id~f@u&8X6Im*{WfTK~l34IWoBpt> 
zXOxP-6I-2?n%7`THe>5T9~rC-=X3Hla_Yd983(B4?o@b*{1w?0Rn)N-f%LuQU=@^c zFqkiaFaSzOdbv|S&J zZFq7PY0}2gPgMfmJ^`};Qp?Ba=Ne~^huaIa9D4Sdn#3&WYQSV~`pA}xT=O1P#8t!h zO9>P5vW7M!QrKLg^Q4t~Ax+wph9-c=WB>x@^%}<5rA{jX@!6L8aCbHGYoh;}QS%+j z2J%pu;F(DzD#B#!ECG3ikF5~AR@ZZE+weoOW-l9EMwexowySeus5YdBu8@}R09Aq? zmDkX?{!Ri28EfWM&XYFY4_F4GA#0LzarCTlz^^%-r2gSXQ0LAHwaRcIsW-{cmWsPl z>yS%qn%-xB7|y>B6Tw~2m-UW=2Sf6pUBDT|ARBY*yY3O+kO$awVPSN$)?N1#ovq}*uzOIEnrdgH+W&`fH0n?Du>Id^to0xCD zGg$pcx$aOf{;W6gRUc+ujt-a0|Hg$C_|?6{r|;8bQ!oG%~EdF!a#GdI!~pAUXK@H6a?zC8?GDTYICsJ;0vLAWg*|#w`f!U z#T3Y@c;@D=ue*!Em6A!xc%^^dSR?R7-b`C4@gA%ju5P>>vg#~h!%aqR>njlpFU3$0n5SSUmp(WUoRo2&PElly=4qH z>t0;g7@T6sj^}KVsokTFKw5p9U>Av;;?2Dzms;2^Hjl2Z-qOH$B~X)as|v3*2${Zr zgJ7jS)OM=A5I8d@>%5fZR7khxYOCB7C z_8|GGezcO!vZ2Ll^;Kg7ZYOAaYN1>nCsK@*f)bteCcrJ(k!#mpQtRZn9b4TOQGIGHA=~u;!%Y* z70KO+>=3#Z2292rKp;vk5Z$d?LZP<%K?vp3)R)c8kenT$ypS|nL5&fgT$%$F}a0HqZ9&yB=8A5&Z__!IZtm@ zkU1gs1EWHkD#pO4-p)#*Aj^onqaV4A)Vk&?MzL2Kv1ivjp#(Whb>69>u^k*g6vf=` zNO^kPah%O_jAD}Tug<=Bw`j1^N_#$a71|wtOMv*^sW@?CXAR-qf}))M0AdE#EZTJF z;W>XI7)Z|8fW*A&A5!zqAsGy+&YGQefm;V;|MG|jZ~g==c%o>>XM-!dpj>?*$VU2n z4a%)C{yR5EFu3JUzVU_0XBe!FMj2-OGZQ#2D`1bto?q~D`HuQ4CHyid8w{!b&XMx(HVM+x?#{4~pk zgxE|Ecpa~x^|N}h_t^|x;EFh9D^#w=0zu*STCUqQfMQY$JQDmosDOhg>)#8PDKS{J zrnW;v!UHS!5S)c99@nl>&e{$j0+1Et&Ev;uvK_LP?P7PyEmN_szRhyQJBRXog!T_c>~`{5URl z&$UL3{sf}cnvuas9krmHka0_I$pOHr2UN|%tB%52)vLx5JjqqRFLj#5r5UW@FP&rE z{py)1@PflB6M&9=5t>-lH+&OJ^FphiVXL?4zMK2M{cO}iAaY^mZ{X%3DQt(zRdHZCp>HN+=m^4ld$C`PGPj?Jd zupFmpb~AEOW4x}}Slz(7)ml?d(Imn@PECRn21KNod(3;*-vcqA!!RZ-2cwrurIqEUYWEZ%98ti|>$k|w~A|;DOzGVtJ z4sS2O;-YwF2rANrgK1mwG|i<;MP872hD4sE=EYFcvSyXSmvyl(|bOj&b9?tlgrY&;pBAbJEf|2|15jR*>b(WKeocn z_}Z0nHny!ymA7~K$^pvZb309Y;9vXLd>m9fiH_EGdu{gKKpDA{Y7vRH-TVk>a`f>$ z`(41R@IP;Fu)F&>{n$VJ>R5Yjf@*HOp1v37P?sOmS#6a{t5>^2(hstW{s!JC;D3Fr z*#5lrdtzPpIQlkuZ+Kpv6`#=e+?szTBm4eF3*3K3ECIhXdRf0qxo$tMR&4y-`5w2v z(c^pe@I7BG!p~Hx<85UX|4F`=w|OO zHJ|*vkGXlFef!sAw)JXK`#RB58DrIXhkRzIzEUu-LtUo(eH}!dQF5*2{o(n-`|}sx 
zL}%keZp76__o31G_3FdM=OpAD-L@7s?b%di(&}<+W{RS-vAeMC@N7qw_v7`9-=)U) z?D32K=k*z%o_F@$H_G;BBP9pt^E~_GJ7TNT^)Y2ejE~FPYAY*b!Y9Y=Ty-P+r~4-W z|Hxr~Y$x;UZEU3bVRq)*g|E){K+?`iH=9s2Nk_S(bwK0VF1SU3BP>(CZ&LzUD{ zB}LWya-`Ufjg{MBZ(9fc`#^Ft{&MEm%Hmc|l*ivj)aY=>5?hXkO|fleT`$82?Y}8H zKk)c1S^O$)Z^=JyG|VoaO}F_m-!7-wb15@DuVW>K@HJntp7&uZD*WCzYvBn&x}4iS z_7cZKQUT~UKjvea$#qjHvOI5J*6{o~_BSKr%pb_lb2rUBuWMZy*>3~wai&GC5wf&U zyl)K&^bfWa=$73M#dlmFG^eQmkWKGaNZsZ#YU;;SBA3ufA247dxm8(o}31hSzWUF@NWn^=OW-Rz0Q?C zqPB%tyEb+ebG9vJnTADe?zPNVU!8(kuvxM=Enz+C1Y=(ESvL;smxeVy+O3k`591Nf^NYLvAx7}0Ng7iEN4@3V$;}VhK3Udg4*wdws4ISh?5Ndn`LPGrkkg}eQYeUI+ibhn#7>?}Y?NhmnW`6!J`%sp@>_~YIb(BmYnWKq z+{%M}=D&Oa{|k6vJ=-Cb1PCB-HzRmUlOGoApMOC36kM3gQ5aVAmK%a9oX*}Y*yeH6 z!44%%jK2ph0uGtqmC}fT=&vNqRumDTr%8eWA{Y>`zoo%-`vESCA|>K#Adhhq;ZeX4 zCrgH`te=Qs7FjYcSp58*#{f~k?(ge8tu{Hz)kjIiDZ|RTIM=J5&LIYkZgxa;Mu4~*NZe!d7DbkV;pYEa{@}Qe#C0oxkr1WW*YFf^Mp9VOFyFK zvUUP*=XDzXdFq(F=XDDI+o3lOFyuJ2?KsWWF9^+j-av|`Y0{x-9;zbJeQZMKu6%3z zv7O!OuCvNz-LR$wHp+7t%d)DaPcQ_B&HRveA)4zL7@zApFhQ2{Dj&L<<0i9Vdew+| z!)sSr?scb0&<^Im&w$B|kDkxKekrn$006&Z;QvjbVef42V65xnX!9QtwhFuz@(QN! 
z$;s-=B@XAuWhx0ACj)h(fI}E?V398!WiIwi>$>f_f60ERj1&z!I zNU5bKDi(`{m>@0eh6W>5yR`O5p*2+@wPo;n@xphCwUgCCW7X338oEH0%lypFOZSWC z&dbO4!h+*Tbe|}IoRnv7RgFv%GXTO10DxKuk!Zi9IN}Qmi3rTqlxdYbXF|ffL7LjA zWV!nIu(6|p^hxY>kkbswSaNHnQU7_knL=c-kf#VK{n#2$SEi`aVrO45?e znedW@9BqC)oGJ42r?3txGa+x21Y(}Ts73>mERIYvs_#`12&l5;$wWV*y8MKJL1C;w zk(-9>@5cM6y*_Xwv1`H zARJQu*X^c8NQ-u)toM|+G3@rhb(jhPfHOn~E145qhZR6#7RAQ7QVU|4r%T8Y#mbqX z=Dz|CjAiQ3+%PyAtj{qD%Um^C8=|XdMuor${Yu7Ic1LCkf)L^k$j~Y9$2vC$bP&>d z@Q_Cm*^!zf9bFBdf8y4zD!E~w(CH3ZE~9athTe{Edq^+mfMXn3lkn*7mhB{i?HJcU z#}s#%CcOlhE`c7vJ91zMnrBf~P5nE22PwsAcvB{rAK;5m2V+*fS+aCWvklN7tVNko z1k^ImWsJhMB6xk?`Y9#+iTde1$>Ym+6(gLi8r^+`zeW$lf}~0+7Y9TFUA^$2fyrTl zNxY;w!0EObE4anonJ~HuYfO(8n}WR2-CaL^Po^7bO~e5hN07^X&(3~Cyd|xuyjh;^ zRRpFP7f6-6aH=RjX$Vi2VU@aWGxaE#hF8dtC?Kp`77Z%$UzDyL<@Yh0R-kPBq z8@*+=p(K%t0V0ba6MLF67;?H^FU+0K^no7h*nR3zKUN2UV0#_ZmWqWG@2Xp)#8&0v zC|1IP$^?>;GUvp7WEwi%o`O^>cjtVBs}HtvfU#%96e_E2Zf9had0uBXTTTz~Mszcr zZ*5RHTr+6Fo5^^_$ye+z1l?SN*s|&oB8D`La9SH{9%pNe$zQ#f@U#%qzj8Rje}Sg= zg2iEp777I-3y7fzOf`s&d;OEY{;`tr7K4RcGu+`K(Km0OAh$UDVpFy zQm_-VlCB8Dl$;PiP#JTcUc;5FWCI7RzxLb}ZXi&9gkq%EsurjKD1hO*bRe{YVH0d8 zpzfaSn<}8@biQLj_21%1F@-gL4d7(Vi;N%WcureRMr603N_K;%)9YFa|t@TKd{ zFy#8?1IelozN^v4Z2If2W`idBF{g@|IR%f>^K<`_-h|gQ$MeRMwdP4%{bP2g=V7tL zbD5U!bDcYeU*zYye^A#~r~AFl#bIA?qKAr~lVX##!T003p&}p)HCvni!lN01KVRCJ z(KPf&$-XB>?jvv_C29I)W=Hb@?c-DUI-tFyhs!Ao`$yjBJLq*&{O|X)wrR=EqvA>T zbHdqzDqgDK2z;_)!-?fp0&NGp7Mh|j&5zq9d{%_xj_;F!V?)8a+0J?WjihOGq_-Qb z+s@(-TgYRWp1Ai*#(>3zP3eU$WA=TGDKl``wea~fCUj=W*(Qe6gPnYw-Fu85c z=kRv|_%r1dzIEHG^G?pQSF1zah4@N>z5eC7fnEvAr|93M;APdnc)o-V=XEC)Vu}SP z-ANzsr`_bhe?JC@!B@KOo-e8Up!qZ3%|}~?T@B8si+pP~;B{ltZoFSNo0zXbb3TIY zR)DziX*RSUk``{m;Q6q)zwT4$FTaYMM($9H^&NlS@?1ZPgS&4JhAewDbbqc!=xnu5 zKd1b3ILmYDRygpDGV8WD{(SwhE-%;X_>ODj%U~G-7=)Lyk1gt}D;w=q?%2?I@!*xd zTz4~eYAoQ!9&w}<=~h3{JjyKRPxt7>0!}QYs7D${XG*JO%0s4c5R8(Lgi0L>OY-j88YZy=G)xr;AL6dLKpY@J>Yd|mNr%}_O#!kHWN%?n`?+L z&M&Ul;()F~3Twsh#U3__56d!;bnUEkHqkpI*BDp~p@(`0$u849W%iAwn{F(Mxs`1` 
zn%)g~%oPfv(i%jqDK7)ZwY<{77yU+cQJq?H`36vpY$QjKtGQ%DiHR~SRw*O(=A1Ya z?4@a1f9`l~N2%w6XTAu_Mt)F5H;CMocV2Zi>DkonE;d@d;Iqt#Ed(^>sN1of3-A^r zz~;Wpz|X}7ektrxctY^`;BpXAxe(#`B$Bx>l6k!1f>i4f!-7jcwD7$@zr_X^1m(9P zLI9k4MhG)bP>Tiu0{~2d003bBZ*|hq%-HHbn^J2fUMpe|6rT^n8usqkA*pLg6%av! zRUPhW?N>yNlM^~JU)kwBsMux8o7IjreoGfN*H09AgfCV~6}j0w>R1Ya{M_8!J@J0l z6)ituPtbWmk&(y-a72Sb+wBbYolbUJlWnb??JB@fe!40)_6!mzfL+gUYLQ3}G&V!< zHy@kL2b)BEVUAjHwiJYwm?Ac5w22 z(bdK~K{_%tGIuU6fz+7JeC!N1tRX8I{oQwpMxew9v2H1cZtMiL{!k`EB^V;^ySTI9 z0E(De+n@nTv>$)>wBW)IU*Wu7cPYy3JOoZs<=1&=tP*V?2DhbFK%sI8tipB>$?}Nt zlK1S};#$|C-U>13%xo+qvqr^7PLkb5T0ec9vf;Q`7t*RJ%wc>XNTuqm^BV{)cGhJO zo5bkX`M`X;kd)GiKN4s4HjQpHETGQ60?cC0g6SFD3U(CW81$W@>(oyo&~`hl)|#mJ z*#2aP(Jwn2g;wz3+-%LJ4xnBce{PyO6$XQQ!*EC_za#NJ#Y2jF^>67E=B&GL6dX`q zkkrXbFBeg1J}LQe4yqx?G@rDth@aEX^Q=2OlZMxYwNInsKWH(&;jk$T-)KYQSu-D7 zHZ3bx=hJJueF^*}*|7n(uFyvf2~V1qx>MCjH#t_>(@nB2>q`wy3pp+}RasvS?e6tH zH>P`cwzsPmP2&yjcHN3SfUBYJS(npo(DkzsL_M>20rM;dd@V+}g zG;j9>JuY++FD^GX%tG6ep~(91IvOJsWE6|bbxR9ei^-9VEj(Nd5H9AlZb?2Uon1Yf zP~eNY-Z+1Z=nBn*Hz^|11nc#Y^l|14B>j8)@gog|A@gdsfa6f18|k*S_|$o-CC{@3$AbjZKC zaWGw=0020@V1@rRp3KqNz{uR;KPqHvWf?0JWgOoI+f%Q`jqYo!jz$8Q%lkZ|j)tpy zc?2)(H9AtzWwg#crmeIsSyva6tTgMs!PQ+*|ieU$%~O`dG0+fs2w!g0(hmO2r8Sa|RK40(MV#0UYt zh=HcDsEsN@Fs1G>KrcfyISG!$KFT9%G?to|PJfofC*LDu$!Ka4mwNsfRImh23Irm# zqEB2xUH$;D+~3`i4iTqtd51R?Isgns5-?)S!-GLfKnZk|e?zjTIeUo)df-TkgZ8KF zYEFnHB)-`RGnW=Zb(U+~dmqr1I9GyDSV=|xtIGe>prW%N3DpQ7agtJl`F%l{9=;7o z$Xtj`@?Z(IVo4POe3KHeNuH=e3B%MMZ4pG_4XT>~4|fh`KX^)|{4@X6NY4$K4`|KNgc{G9%1`_BbCTcrJZ?nFU`q(L9@GTWlXzDnA z6UDs;^C|-5A+O2nu_&qHYQ$o$g=3#yT_Vp)mAGb4gXrhGNk)i%Dx&QDIVbZ|Gz56`2NJE+d}%+0^m; z3$4PmqH}+s8-EV_YmqQn%|}54`^&~DW`x8?`d^Oou?f`KdYy^?)?NaRhLm)phZ4f@TA@9IO9>&0$c4xW#_ zN2Rla%TCbU*+N5!X^!-#y!X|7fa~L4nWbiP+V~c@TfzMM*HI6=_S?0G@6Afr$WNNN zH}WGdPbY^>ig|QfEq+;Tcebg;Ipjs*!#SL@{^$eFdP9qJxU?7xfT=e1sv5+l7LImj z?E6(f%E~biE=0{`mOr-$tBnqPCLpj3j!=(&LLs>it-5#Y2&I)OeYL$ z@3~^I2;y)l;B@V{iQ)n)0Sm264MuXc%pZdV76P~&ENp%)E>0I3U0~cVNw$*1`nC9a 
zTY`#9b;7%vNa^I^=$PzMzv}Y6u`lm0Z%M8ec5UTxPwMyUamD?xgrV5czeX=G)WBz# z(y?!q*R3Q^SZXO3*q0OXkx!BBd|ON+7H{gtODajK>=j;e!Itb<^*X?w%nL5sGj0nW zQ?=!31J9Yp>*dELogT!~3 zZaQv)tE5JOKKcTqkn~XCL6G_i93-72Fpy$FK&S6J|A($~>=88#kMN$wS=+X4+qP}n zwr$(CZQHhO@Ajl=(zI#6yuaYdo5{>Q*8uzi{!i70ayymKi2?us^=}~O|LmVQ8C%;~ z={p%a{HM1O^{-&AwA`Mo{S*;CWgyjyOy+T*r$ zHVZ$n*^=4vI_vulclUSgcgGWBg*iE0ilw}y$3xngV8N0>JU2tC1RK%7AI7$d8vZOg zq$`|n>LL>4j`~@ou-t}hjeh1Vgz%^DO^z;yOqza(@ykD^r>3eXO|D2%XmXFpUeXj; ztH;K%h?H?W7ZGxZF#m^7vzHa+$t4Kzeu|Ik2-@|WVG|*~B}Q)zbCdWzut!LQ>yJ-( ztaO6$=e{2`xO-Fk=^Xyc$6zYz80FovD+D;JshdASUEuQZtqt7w7m< zWDsYd_DT(3K{X}P`CUlSj2>CsGJ51wi~SsWx4)Wd-v+waTgPS|yN$p12jE>lV9Zg* z?%_!S!tkiHIpmOcy&lv2NbxZB2w7z1`nd9fdu_;c`}Lj~FeAt)gZVqc;u#3EjRboDHWRGTwCn^z6b?oJLi3EhDXLX;pD zn1O*`{PF6Jd~I)h+bM{4Z~AQ>Iz$_B;F@%b0ItE+ zZOr@3!0ZHCgl*&S+Y4xrLGDAZ?Xd?w%oIQrIeXu+ai)WbL~yeA0_*xK9b4JFX}w9P zt0PSD1DRGQ0T4iD9CX|lPeZV}oPM}7zwP6;2g#O z6;;d<4B5v#6e?KA%Ie9MLWxR*6YkFm>myt+!OmeCox}PD?gpvh;uCQe3k4|3CZ

TIRiOs6pw1D3~1D3!vQi{8U3 z_QNGBYf{b3EbZ?1w;7Gt*H!995knS_X^S=4En^f<=B32RCl?J?eRRM>r&KVa5^o^t zZAU!|TK2{2)#fbk78%EGHmp4T2NlT+j-6{x@jojM^J z^6+9YF5_92%KuQ=CW7pFZfIF^XGS?L!kzzEB^FC*XmU&hkLY|v>_XN_AOSiwz(<07 zI^fDemi~bh69~Z+@1&V$L#XFP7#Y2BC`gOrfazmX5RXui81gF>*MR9ov9bnPrLm9C z_vTTg7l(k}UNDE^A8rjiKG26=-dw_jAaXp&6Qwb+fZmXZ-x%ptp{Xd5$Yvsb+F*9t z-Ii2+Mi8-fkpo%=F_MIe`wzl47C^DmO8t3${UaAbCXI|Bz>7qn&X2KD+*G3a1<~rI z{8LVmgcl`?{ie8wl=&{h9MS6!ObTZ7(06c0;p|?(%#&MA7-OZyiL#1S!VHc}n^Okf zkY(JdcpYN}FpQS<3c`>e4A25kGfrTr)hz1F%dK{fG?Y^opv@4TObmqOGI+sK;)vE;LafUE^3tPaCHhH1#G(`9 zhByR*P1tFa6NA_w0WK0| z06n6&^x|p9rD{-}Yx`6Z5~H@E$G6Ml*Yo_mXk1&0bmoF*-Z^Wwh*q^`juq0fN0kYQ zx3uUUab7Tg`|Runz>)%x8Vin^&aNKWI-8OD@i}&|iZsZxCaz>sR427qNLUOl=^9}V z7z>tgk;Uj6;D-a&;`F~PMF*;eTuVew2pvxiL8k?T7wl>MKGw}wIO#@ilAQ!6>C$qY zuHvW+t>kGZVcPkXxe7!FbY_y_pD>vk;0&V^mswXF6$ z<^><3=~H#`?s_C17Pgebo5Mda77tm_pOovY7S?VwAxhDCgv3`v(HXdbql_`s|58c% z8}BoBrtm{9UOQa45Nq!koH=Ix2(2)TbDsSg1+tO;uuR8>4j+^m+@RoGN> zP{lb!FA3U43*y;;QlB!na^c38Ag`=Dh4WD~7=zL(v5>xoDjtO*na@=!BNJ#RVHyQt%Hq7C;2NyOxWj04ZQ=)mLSyVAkl_fnajJ zVkIq`OkRIwS;%B78VPUk3#Z~kNV%}J{d@U1m`uu@X3_Qr%V_4Y$c7~}`ZztHC#>&xECv*zO6hOmZDNCJZ)Lmo9 z%w|T=NbY{KjcGKf?2YNEvr?ei^J5xsHWSUtU>e~<>iO}`S(tRHF{=AV zTqJokOa!8(9p@hP3PqQX=@P2!XBPfd7>{or!yOUos*Z4FNm>|s)iTwqVHJv{>W3xL zEfSd42LdZ>XBz%h{)@v18$=qam&!)m@#9Q%wa1TOAqRaM2=R?-k!Kx@P4ak`MxhpN zI^)Xv&V*f6pyl8y(yogpMnaArB21U)`SB)JJJ26C!rn*0a!hqKp~+nr%;rVL#nO1w zal?8~8Zf_TiqT4;IBZ#aIsH9#R**C7Ka<_ewkulkGcLd$o)IfI>5L zDB}5Nw4IAdYw;ZCdB#$)2PkVfnUy*tUL%N+a^@Kwm?4U(zVw-sv;ZtZ1!p6q%Nk6( zSFOy{-CMB@eP8hj5bCUP1^VvEv0jIqgDy(*rkQl;vUa1baq4~e@paVG=q_yt zoweU9MRIw$)x*7Gr0-ASkACy(T9ft!aHqq5zzf~V3Ce;S{@}-Ek@e-(E2Me#6X8-6 zZiOTw5}jqPYa8uYcbsrsF|3I#?OM(Be!zNw*AZ}kIIcJ0tQ79AFl<42i_XS2DivWu z@Y}Sq658i}tJf^hKWi}D8jtl=?mevQKT2B*Q~>7&h+Sq+uS{vq8#~9DeB5ss;C4~K z+qX`bl6npV;4J{k1>e&Kh-nVnPixdgk!2GRr-P3<52;AE!3Z{dDdjZzP315_IRZ_L z2@eqAB8yh7&u;<(tb4$$Nza8{hy$4L}g*K7^u*Cra_r zLl+xm$TLI-K=FUN7hnjB!w(C?I|u_Ki1|qh{NNw&Zac&cK^Nj&WEGL#x>o>a33YaS 
zp|jZxtWtr!lSJUm9EDj+pB>J)eA^s9H)Xj}=W+(mk?s8B=*ls*J(2FvF{xthSb%iv zVnTe^yCP^)R?rGzg!-4ovXakcSr^q4xB*kNjPE7y2bx_jw`CB0=aPW7L>MR8fi}9} z;QgzrJ$QNL+&okPI!M7gtJiVN>M$R1vG!g@MRgK;vtYOVo5#mcVyFGJy-aqTMOgcZ zCHLNlw3Qy0?GS8}DVoU6W-A(2gcU7yl3fPNk4yLTU}qJ4foEXM3Tp!h470Ybl`#3u zm6rwJ7pE`hvOiT7f`=!JV6@lOj6`Xj#%^caL3p}Jg~SnnV*B{1k^m&fLOwofQzv{(zoLlfS{(WGjNM9HYGZ=ypP>WfMZdX4>l?<$my%M1V~0kHHQTfqOUiR?I{k!Y{~AZ%ialNZ4eG} zb^KDJ2`N)_5mR!pImZO8k=h`*D=rP@IO@=<_7j+@Z?A|8zcv`%8BAiFSEvfp2!yRX-I(h;&2D%@r1bzn+ z@}bLEHZ3gCZjB4lJeRz$rJ*|y@HMFgCmpV=ijP$&kJQbdSwR0RKO7%oHt_JeBHR?N zqY~2mCnBnUYC$IC7hqv0Uox)EQaEvZDJ!h`4tHJUN#vfHwLmk(grCqXiwJT$!bAi! zmgSE$olov0z1hPYNrAXo;v7<-55oph;q0*#>lwRk{{3f81j}Gvda#Qi#&i~ww&UW7 zD35$j`Mo0+LFBR*F zYF@R}tt}^Om7#9|CC1Q)-FJ+dYO$*M}~KYkFeGvdOtevsXu1xxjF0KUgFy8$Mc4@KTf%hGpiYL0KPE2u22DmS-fZRYPo!P}baqn z$!|NC9=>zA+74(Geiw9o8f}*MWlgwtjfRVzLw5$vA(V*q_YW+pJ{(~U%1t4 zyShz&3_-G8qjNglP2V|r+ZOfRT^;sMewY6;=5Y3&59$g>?)S^X-@0^HMr|J1yy4YCKclejIA< zy0>2~+qTM}v${=jHt!AEW=ueEw*DRzBO>y!dsZI2k8i2fov+>IILW%boTHE4(D*o~ zbUdXtwS7@yj@enRcn0G(^(->kJ58|u7<#OS%s0;87t%gK{LBtq6u$Q?mOAvde|(8z z#&&nV*ghck()5^i8edaw8t#5hhG##6*2?tG$zi>xb$`=%UkaSgGG60icR0TqYPxD= zzpqAOe$x6dv>h7^pYHk`1!B&2J+G`2PsUn7_55~&rMJ~@d)~SZqSDT&d7T~DwsKXh z1G?6@J;~XfaiUjes;Fe_c+?(qa{i)m{Mg2Rk?J6Mx}Z>1TWQR9*q*YOFIm;9_mrKj zoHdea_$=L9z2frHxF518K4Vsfn>!DMeiN}tjW-f`&lcYvK6PjI=z2zXa|NIe|jdYFyKwsA72czpY?CI*S}Sq~tH7J$^wM zc>VnT%bNa8@;Jv!njy*~h1hO1jn{^0kkLT!uq+5#Gl(x1QEzONN>`UyG|QFiC$yWB zBuXu$i(B%m4hNgTSLK26un?RTfUO8CmzCuaIN|fCR1=Wdc;$g0gWL`+Wux-k_|E>) z&akJ_tIVQVqObw`@R$8#M%R^b2k^Wq)9Y~J_p?e1a~dYbT+U{PG6L0+mTx#vqn|HGe4$B zSw(I7KUnQ3C_7DC_KAy)Y=8(*HUujUucP^K*-9kMYMeH@5Gq-x~Z0q zsC*LZ@kY!-EB=vK_Vld($8cZ^H`=7`Al)v!}D4<;Vd(@zFmC8xCdU*MCqG6iey<< z&1X)Tw5>`wNH2$WW3SV5XD1^sS{gg(1dbXFT4LPyQdrze(NJq{-PsXl8&olyXc)*> zsZLS8nEft4q$5kMMb`S^@-yY5Q?JFa2+f85fjYp0@h1dFCO@*nB24R(kJ<;8F+fmc zi*#g*g=`C`Y>P49>Z9K3*WMZg+8QO~8d01&MAz9PM!7?YbPIib^NW3puvHsy{pyqb 
z8nAVUOVm;lu`CauD34fCWL};_c~ImIF7wwgqqHt0xhTPjn8U%y=3=8#r&3W=E~>MpS7kP^KE*S$lpz5mN-CJM?iAB?>gj^otm z{HuP5`#*i}E{0}XW>^4#uz%A~|7YImY-j4AZ)E(RuD7)sq#NRBwywFcrHGq;adJKM z-&$&PD@e@-Vk;v|HD06j5!yQc5FjIUjm?1umF2;H=@<+LadB9{ZwFu&YI5rg2k}MV z4+#e%)S6V1b|3TowvzGGC$>@R4atoX81SIJ=bN9#PKJW=URxKF-`i1Jla-6JT?I*b z5XS1Bl%13$CUN->_lN?I^dL3_Dd2_sdLcTh215fWDPTo|W)Zc521Em~iNsxi~p#v(4E)?>uAU5Rq1#tsWC=y{f+33P~F{LOF zQsF@ya)e1_#Cbg{sarcV0R=Uoiut8BoeVy4Yilv$g#vs8%w(uT1E!i-vGzg*1BNsP z;)QxXGc71-qUa$DCWZt|afH{%rYEth5;uW}N`v4bF5tw3qB8-xp*m&5Es>>KUD;aK znpuCg1$!WS0Q!;A`RsTzHYBMa$AWkxige7V(P6bUB&g72v&4Qb1QiDk@h&2H6a5S* znXn-rjvPV~1O)<8HUeS=c71yy?vUkwu@M@yVLU+w6fwa)lER9d+F}?{F(6SykwhTV zv%*4LMoc8TtBAo(2bZ$72Lnd9bQ(d?mEpM=T|6U36p1jQIK7kUTH2CH8CSg}(rXDq zzZ?ZCRC%dTAn^KJoRkb1g}(~e1-#^Fw46OX8V0DIl!Ri$BOe8RMAZvUl*sZLojb*` zCKSYe-+*NeS|R`?MY2$t9ICX$eiKQ3kWkulPt^x)H0Nc7=wiKvD)Zl812SUb5_oe! z@KLb)#C5Ie;H#?TpiKZZq-0mry`17DUi1+cG)bqbsS9f3t{7dcQ{$a6#hqrSLyZjE zV)B|~6Kg z2*7D@L6*J14lLF-pM&T5Xr7JX2dXq%(i2Vd9qP90fa#0DKdhu{X6_?nuwYI6JQhVE zARcmc8^E2KW~ab{nRG&sfr5J9N&0@|c2>owz?5_5?@|7yDUdEPV4h};qCB>C^j)e& zn8M7+p<~nKUX9CEq!g(91t=(>plurZR8$%5=6@jJSjy{3G z$4)fqw?)nYSfQtWUJyBh5AJRw$Lek~V~=-S6oh1D<5QP1>@o8sk%wriBxmgXvhxfK zulI}{j0_EaLOqhMKKFH!d7()No_RG42T@n@WihqdwuoO;qF^#DpG$f$3i9@c0_(ecOe!5ci&qdJCEmagu z>P|xBAdq{|-%dc7&`^%Gf?gno$Y4bx!{kQ*Rs89fY8bS#&ezyrEt$)x{5i*9 z#?0CW8Y?VZqFMXW@ox-*|IORc%3;%N*gkvay#Zz25Fi0va6&5knd2}7fY--?rBB1r zrBgF#@SA|y5T{o&P~bcEM2X$bDWE3{+^fQz$?@mV@$F5ngZ$F*=j^e7S?9_91u!xN zOEb^)FY*3in|{B-!W$$e%nY#1EC7t(gkoqwbcg=>L60FpLjL-@0rllT$a`xHk)IHZ zftD^{6LU395;&@iSIY%InSgEby#8Iq zR~(rjPfV9bq)exdB?ZrIn$Lz!5u_#tAK@o>V$9&cM?c|?tua#v%>EZww9aFM+KvUi znjRwTkzp9lo&H*ZzHbvtir%zB&pmZk#AsG9YdK6*MI|%>#;Rz*#w@5hDhim%!Ra zZVZ~DJs`)>G_TkLgzk<{g(EQ3Ibx4Pxh^3mF@9AnpMX{oVn4MaPi-*Co}?8fIpE%F z&U^|0np&Gr1Cf7&ym?#Yqn6F01KMp`uFx1}M11yK4xLl#E(F46KRv7p6r_*i{0s2W zB@xqxv0c930XaxJ`QKMhy;<9BvLy;w7*>Q=v9$ z<61}4_Hm=ct@=+bGkPsE7LoBP7OF9+aLc~|SQyCMi!J^vPNdW0J+EY#WXqXIp2IIZ 
z*ieblfZsr>E7D$sJ`6BDX%au?td4RO{?L?p6jUlzAd#`23IMsdb_5TZ6AE;4p%Pv#$Y9)S{h`J?5j8|i*i1SAqM7|hHoE?x78xP4!M>@(j3#NflXWCZBILD$ z`zc)+Z^31Ju2f&^ZOtL~gV-I7ZCemBU(0h6#3>msK_;v3>v2J3L@sl0fcmMQ>#b0r z=0|<-7Y+3{oUiQD!uE#YWT0*Bh{T!6MSM00TVfmI4Z6F*4oli?33lDCMwn3ka&N z+m*zg;z+R7V*h&ucg~W00a9_{$$6x8qKC0;`v;Y8-_asJ!|ZsBh3r%Uyllt+4V_J8 zYYu3EX{;c1$}&I7s3!CyzWq-D(IW?#|CElM#EHnA@CnHs--!wOHQu8K`ZqL_FJanF z9U4zXQejxHv(2>YT4Xi1V>z#Elqy!8mD&X>FJ`3uj!yuC8tb9lJ z66sJ4W2C<(lv*Oz?*H!3L+DlL`nYKZ-mQU<`M$}(n)(<19d=HvGyLtY_e_^L^F6Kp z=}eQU2{+`>?^UJ4c8t;u<-n3Eoj#hpI<=ppd}YwiTTPD6q%mHNyNzM*owhYdzWZJ; z@;L4}Q_9iS3p-k-R)kB-UC5UX2u>;ILForyEL6bX(gCy%?k`;CSOR7LQwqs&tvdk0 zwbXxdxHpL^%wT$gLzXu?X%+IuVrs+sY`ETUb#l@*g`!CqwXT9G+`8emhNo$!m>Gy} zmwB;2p5BoR&GpAz#VwtmjxjC!1kVaKv*Wn%?s%)DiUdBA0)OMrG4)?t;=2~~0Q`S> z_DTXAF`as`V4z-B+`c3&So>4J(F#`tF`e8@l(oiX>!0GplJb5fgRrl|d1mxb^;o@g zKmI|wzB$XwtSV=ll?^aK(7-c&YwLh|vBdGfezV`rxMu|EkU;%qqm%)%xwBxp2&PRV zs>L&-ZmmSpm&nEy$|h%UD4t1^@)FIzgC*7CP%hTp5F-}UvIngSi^T7>^Iiz)7SS>* z%<4T_8QB_W&Kfa6H{%id)g*#v(CQ4AOdY5qhAGn}R342Yq?gYcTt?mal(hJgt=yBU zqAIi;zXsXyyGg{miNuQhD?Q!7uo5&yJU-F8mDS|ZyIISi-X=~(hUnVeC4x?^79L4A zucER8OU3t}%snx4o&@9pf_dmi-MXAyOO*D_Rvx04BKcR_(r1%6h4>A(RTk-xu4$ zvA0IMcS_q{X7Z}r8|KSl_jv9%xjLes&-%AvJhm;IyWH^ninhExjmslqY=E+U%P^jzbj{)KBFs`u|5p?wCr~- zhw+Fpp^{UWuDLzF*D#~bPYaL>wLRNj`{xmp{yC$)3|!5Wcd-4&s1OOXSRNO1p@g_4 znwgiWy$8^_np)o5Jub^Y(^quA2d1Ssjy{9>=v+KrN072fQ`^t4tG@46 zrd?lG&9DWg61!csWc4?&k-1?hx>xP@2lqEXzFCdFKR3Cq>72GY))VPxo)4qFJ|C{L zQ98|Ek>5kphNmyz_MelAAFDI%w`}$=3%fnITv}M=Q}5TtuY##3y8I4XNnGz2t~1`R z!`_^-IG(lptFXJAA7io*JZ~#A>+OT@rW-fD7YiS;5HU#6XXQBLLUitN3bQ3vQYMFW z(uJRTO%_f9mh)#uJ7v|ZW~YCuv;W9gPY;rltK#9Z8%ku_dJCK^=$@iI8=S0UJjR3CugGK`F9fLq-8dKK9#*_C zDaE!6Ko=ft%!Bm*R5nwDLNIY=60XuSF zHv^-AD9{s!BR3~1b|N5>k%OMGH&<{f85s^2vzl6`*+wVq{UOONA8%89&t6EE3d(Bz z_R}IHDe%+~Ng#K)dE%>)6GU{ryk&w4N7@cp-NU#VYF>-xZu{eD&$tt&Sr6-aAl(LQ zJGgmogVl#mbs*mbD!ccOMoj(%!WGUNyXc1D3x7Mx_y*7ex*H06K>H6pyt@y2r+OFo zhVO+n`DXb>^uzH(@+11i<^$*p${Rj=aJ$d=Ci;f{CHRH)L;5xQ=KfXtM*KzLgZ>N9 
z8{!+{8$-Jn`L=sc{Du1l{YCOa{3Gjy{)?J3e7nc~#qk5}gZc~e%jyH$8~f|W+lPA) zdpDZgA>nOI(-TpbjV;J+EiQEKnL zNX8kr63z>@5ncHS5WsSIc^GmAc>6B6*7XVsGFKKCtCx5MAdm3r) zCem2l|Ir=nb;wn_n=ajS@{?ROy)7e#F*-KSR2V!Zr;j5PB@lC1w8)MZj__FpdWQ$% zShQ$l0wj*R%~vZMtV+(3@|sns3^QLK3S|`iqcAKmXjJ$6M>1Llhoer}iKofi??gCM z%UIZFco?`DfzA2dzPt-r6{v5mox7pvXNtXn+7C@CgCkN&up=(~1&Fh#PqA?BC{q~O zNmtKsTcBf;8+7uuBsf3E5J*wM!%RO|*gvzb(o)e2);go6$c;s?tCQ{_4x6dj*|V|j z8$6xr{4mTDl532*;CVsK5X(6ZWst@9{{j&51sCowSoi}o^$XuMXJD5D_9{#nWskY7 zNXK>-n^ZZm0FhHOZ#Et;*x}!T+TmUgwhOF4CXPhcsMc^y-yDro4UDrVS0qmNbvX5? z;Y=W9j!LkDp2^4*K4QfE<5#npvlS)k&CQh)2oS^OSKv?e0Nj8ss1EL;pMn?KeaPfV zkG_Wr6XDKbKpdxJDNbF1Zitnf1)qxw9z#bXM~IEpwe}t`Df}p5ZWkOX zp+Qh;*WkHYn6!5($$Sd$p`ypjY;Zyw+b~OxA2XPi$rWh7#W>^-g~*jZncy|4k2=FA zJrPN5*ZqrVa`5Y4Aioe6DVJau#wxqPB;0E^F@(0T#Rzdv(6qOOB$K<%u=~~ zp5qCeN(1NSvS9tgH$LrGZzw!E8=j6d(eWC*y=(q5p~iYq)LHuJ%iG$iSxWWkTNn9_ zd!^z$d-Y;i-$1J8EB^{oPVda(SkY`8iC(msbz@tq=ZALrlM|S8%URcZtEc?hb@TY> zb6nwP;__+~4cM6$y|WW&IvcEqDT^63V_QOb?I%yfc%-1=;v~Dvq4=}XxeXtdWs(Ho zO~%xT{Mvz2@tvsVJ0L;Uv4pvS8j92iBFBgT@m#I4TCFnOhQV-9VdGrQaxkE z;fs@G5Ltm0{|$Z33rRkzIUR=u73xE?1al9mtU}NMULvrl><;JoCClRKie`=N#(cd| zX%R*zmu{XpLGE6ZuWboSZmRJ-C-z1e4|r@ZhcfW6#2wwyrmK8RUh;Z5vCcW5?W@qN zuIQ&{)pwtAl@Pz4(QpG79d|o|(B%)N-{h-CxB!ueG(2Ad; zWSAnB9x``Q_MkO{votNoumO+;y(V1o^_MOlAtbEAT7KLKjjz{3xS^$P<`+*dtc4H6 zbbN#Gg#Upb2~RP3>TY-N2_|NPz>kCA1)ktsdDG(_g~c5l<4++qMbj`}8Zv!sq*Cwx zt(6)15*d<0VmCz+Miu$6Nn(kMSz~!glUaobt-9$DmvF)tUIV*+=I-Vwv@`aK62;mU z-3sQ`8PW#T4$A2@ge6b{2+<@`3O)xF8R_~~wMi!((#p-RKsI|A+hv>;&R09qPZlvr z6-lNmg;k3YD*T*^nrWmpBqdc8Oj$N;cpdRl+L_-sIGbEUr1~#<-gLYb4;c+KxfM+@ zl_nf`_VwXo-bI(W@*g??x~n$$h+$}Mc-E;u0Qr|(lPj}OBh_*&Hs>~547gG}V7ToS z8B+$(dc<`GcTN@ml6=Aw_AiES%&;s__p(1O@kS&~bF9o&WqPRqvvnJuP66R3fB3nOWj6+werX5hF{vL#%^hfJ_p zbVD$XCpm_JV{=X zE9^pR!*O&*(yGdSs?elfG4OE+Uxe{*<^`ZuGOId?uRuRU?|G$9cZwC($k=DN%HAPE zT%!Dq`m-HjDa|6PPsvxW(2$%LHt2x~lKaXD*GP!ZqRNDJR8{ApCaUt(Cjo4UTtui+ znKnc7S31^+KF&6zPx*-cNQ03A3P({Ms2e_U`u2eSsjY0oMo>$tD5lU!1|NB5s4v69 
zkeQ&akG96H0VsSizkoA*Nxkjdp$GyUO(wgsOd&jMt2M+Qm_fe965j{1rP96Ex-~es zSsbl9u4ny9?70N4pqj1?HiC-iFSZ#BTszFI32raecykF)`wE)35*jh`<{d{dt(NkHqLy5Wv$TbAgg3;~pP=*((= zK%}eY1a0tZSBT)}a5Vulp!yP@9v%HlqRt4I59F4f;$Gdd9%+;YdsXV|mU04k6nY}! z8qkm=0c}ztT3}ed#SwlUzy(U8O)whPK6cCiD-k!i40D=J{5E#G250=l&8Pne5&vjkubrrjI4Jl1Yvik!6SIAX3a6jnvXc5-Un1pbs0$n=3;#}x(u+SP-&sVw1ZG~*&I8`@od z>megRf>#mY(iWQ~ELlh`8>#Q47c>}7157+vH=?8+K#!v^@N5-xJYl<>M4LeE%0!llpHcAh>al?sl zwZuW=;1KO#EB@n$AV2lKvoZW|#|FiYZ~-ID-d?mpSBh(HI$C|cwmrX_Q~;Z)xLp7( zx(u)>*P+Is56A#F306>kKzAqG-d{9Rx~}LYJZ1*KjdTIS3G`D_DpY5XRfrI=t2Qs5 zM4ONkopWM0)wzJgfibVn524FEK43MRO1yj615~bK4D@5ebYsL;K4dLCBlqk1+taWX zuXmuxE^un#dc;$11Wa(UXX<*;Rk(NcH^;A*ulJxxF21O|Mnb0fDe?KC@vrJ;ExwiF z0LeE}P2HaQOF(~gBBtrRvVf3$JLI9{_MZh3Ks(oH;hzQM;G_p zvcL>AxS6n;7~0VCUcB6+8eG;*xlt937z`iS);kz!^SJli{pW&_LZe{+TW}e_-5| z_Ic|Z%YC&yDzt;zMx_;dIZH_!&`G+Yf4c`4`Yt|~ReoXH=+E^2LT0;F>wb)he~ifo z&N))>jOeYR6Te<9@Z1x8T1`Fo%;nkEIS0&tVba#@hE{5P%wI)pvpNB|OydP|ofA(; z02h@dKwnAFpWonqLn`B$(qFEq%L zsRyZukHYwY{$Vns*Tv~K^v2EgBS(BLk%}Wn`~f|;wEyWC(78N7OnKQ_DrM6&YkFSc z@ygAgy(fcS_Rp@p1nee(fLt<4;)zcOKN@TU^ZGIc&lNF5x^|574QBJF-zT#;Vm})A zYxH?g#Quj=u8%|yoO?I!iVX0Dzyjcp2c$}NmHP6qEXGEn z-M1zS-`(o?%;ARQm$meRYg6ib=LY2$_cfZYuD-3{nZd>2t)AihiMCZilRIoCag#f4 zW^ogHdx&qIjLTgq3FjNAfM-SyZ`>7#*(1fmsUXh9E2A!^EV?-5a7zCcD*OoL=bNb9 zW8ueaY-|+Ud!WU05b?UCzbX$PNOP28f_R-bv=y(;q;J5E;qb?s=?XH+Zuz3*Cp;4eYqZ3kux+J#p}N(_z4x?(lW(=q z)@bRJ#y* z8Yo6%=R!P1=e@2@+_+I>F#+cE-X8Z7%lwk7!?I)# z1vu|5-Tr7cvi9?VApWUK`L)*o|5!Z8c9uAMDjo3%{8^jq{TnN1)>nB`!24I1QKMfG zd5xsr4#5KKM`kMs8C>t_^)OvcqRtlb&dhe{niDXqUG4GTUVHrssNDuF)E((-BAW_! 
zE^)sP`U^|)Bya}Lp%@^)};l5J9m6SwLLU1`zi38z)3eXGF#+-pM zj1b`o!Tt)%o1qE?JOOSqZAWiMgVFHI;S)r3j))2I;bVY+gd^aG$OGF09rg#44WO}Z z!7&~>m|UbwdW|wV&$xcv_Wo4#JnvL`OxtHAeKwAxIfTFyM$t#>51cZi;36 zR!;|bv?%cYZtK*cF!KA%dTykRB!PYT2RLKzfA0;C8@$YmI3p{x#4zp3(5WQmj;e`s zq{j0<-dnCD#E@DGy^0EI42Tu5qtYoIjn1Lz%og+L%o%#+J0dWyJp@EFnhk z*N)RIcwtH$zJI}C47Tz|&6-tln;UkRdcSUk=u5u;0Pj|{xy}Zlzuus;!idk`y{7uo zRHG*^t@3|ot!Lg?z1XLHWipB8`i6Z`dOIfc=Jw>4dVGEb9_f}?uAlz$`9LnOp5o&D z5)5kXrOtPmqN8|8AEhev#Osrf-4=FWqT!9*68|D;7xMI`eR+uR`s)h)hn$pB=de#}twblFXW3tOTe`^Nw6& z`Zj$B{`BzFasYK;146&R)~M>bEq{LP)Fs1O140fE>gb+*ds}JvyhC4hX%2f*SR>|0 zt-aq?4GCOYY;Q>KewO?kc#&RC+CA1NZ^s*t#?Ep)m8ZWXh`xk0eNc+Xn+)r=;BYSJ ztIKJ>y+*4xm}0^>mFzx8C#bsNQe`l&=abD^rwfYQv=%x(zcLtM3^U|XAIJk;A za=24@==f@txw36mHodzYN1=OqtD@&1R~&r(Wi=N4wbQRQ4tq8q5B+Yb;kb;;RbziE zdmg+kk)5Gw-h8G#O^I+C9@&0AIS!e&KAnxy@SO9C_{I>In`TyjZ1?48Vm^j;H(X6r zEwI`#OLDN@YQQ`#sLizC;FZ8`?w&c%NY$0+E7~R;hIEXqaip1*^2I2`wPT~c6*tMXn<3>&Vejc9yGBqAINt;Ez|Fg)R?y2Pf|$c29< zlP7kqa!z9#AR=h(|H2gY>S}w1E#J47bM}2 z*{TqV*M|Ci6Mg-u^AcTBjMlVB(S)2ZdgLL;|4ASuhvCapM)Wi%;^U1`ntony&-;|> z=i0Q0F2$ZC@+qTLmx)EI+(&z20kxT%UpC4Zc2ahUnB;G;#oFiG*rD}K(yXRgBD6%c z$Sfub!;D7FqPcVZ2U0VKOzH^eR5SVCbcNCbsbhrV5Z95B#ZWetZ&@9H|iOt8r>}8tvR;I*2y_bMJI*!h(`xSvt3{ldah%wmgSA`cqclRAo-FaH#z9+wz>{3^>8pExU`=D4d1`N`ne(X%eSP4R z4xWUcE~uX=4Q8JXk$?}%kq=NI&L0o~4_+znFEr7hz?>jO(IA5w3?&|710HBkh;$u# zOTP=5->n`i(jX66@V6b<#6B2SutZsiMp>Yu-QWM9?45%vVYa>DOl(Xtu`v_dwr$(S zj&0jcCbn(cwv&l%y?M_0?tSY#_n!0Cw`;Fm`;V^b)vJHK&{zvI7h|#ytQnMQ8d_xs zyuOd>kB{^pKMq@j3jr2R-=ZsQw;iUm9I}U@Z>GT@Gq^a@NWt5-fL<9pP_H1O891&P z7|&_6cUx@mZBW=j#5dqR+V2DCfi=xQ1t~t}jN6#wLvYO)P22G6K^%5y7lZuk{sXFh zuhl4zd*9xWqIdN;LmE_ri>i^*tAEh!F}3?yEk`t6;cgB=JfURoQFsLXP>oVj4K+#$ zx1a-2kPAO<21P2}rkK3=T#x&E`AH_g@jvdMdaA(y zuohCYt`jTVe?&7Zk~(ds3W|s14dbdw>mRWtQH@QGNhhS35Ju<6l4w+oi$*E%r(27U zJXk`ZkYcDOVfWT@jr0ZqbB$4R{o+(NC4hm!ez1jvtFErHW;1i}u?9K+aAs`$#k+L0 z?)vuW{CVVjiL>VE-{ucnm&1*Hyg8#qeTY%$6sLziQBVRU6fR?KzRE|_;HEY+$~0i7 ztul74$(&2Jn(HD)X2Mz;g{N50-$+}!RG64ixu|0DD?AY3gc4b5B>!VSEU&U0H(Rzr 
zP);Rt5l2QUOUSBdzFvMLk&PrVtf)3EjS{UvAj*)gK%dsIHAAUEnasq2T^V@=7)+nt z*Br;F;`bzdp1AS+e)9!T9lMyt04Hc=@@ZXQZRxQts|F?O*g)xYKVEUMy7GcWiJ*u$ zb+F@DfdCDfgtQx3Nl9yanoO~JMI5QQkw{|EM_*zi$pt|u53OT5(DhKKM^-n=dHtMxPw(R*7>+r_;sus|h%O>m2NmG#mgAO^%h(4uYK-VbF4)dFc>^ zB@({6{Lx#BfDz|^bT}%SgE&HVtBG9_elc z-`#aX@vmJW{=DExJiD-3Y$Hhxk|Nf5CB9RNq_!|b<|x)wq-x{!9Yb;3o#-bUp-Q&e zGO?Jv_T$U^<0%3WA;S#E8|Ue6Y*AF~!ItAD;o$311x_fmkMiqE|4&t6f|Ul|7>i;k zwtWU_rHTmpS^*%&!^~Ya_9c)cPH=0$P1Va~ZWe*`nbXbX%O`k?ptdn#7ADH|3;@%! z7F@iCOgk=15NkSqqPB66=~u6~J$eg?Zz9}T59}d{(=g&I5{JI1EfoeyGcMhQ&neC} zl=ZJ3u=Pp5nu-uElWjI8>tq!}o%>JN7LSi?sImQGP_URDtvP8VmfB0qW(-;>PWO_m zBQwJGv#Zf^+Xxd_&LuLv$8vQ_eAE^U-+^}Dqi-IG!@GlSasm2EYi5V+TV(n&Xh*`Q zdMTNZM4P8a>(ibz6(PQ8tb*1909Rq56(V#Og7I{NZJ}TbjCDb|aO;u-yE+hpCP?lL zU5vP1A5GZKHJ2ner0#H^ez({rY_`H)Rp7%zCp#%1)7sz};s7&pZHOBOhnT@=Q6z}^ z?r_Roe! z=zvTv@joB2i0TgxQB|vNs~;t5Wi9}hQ(miW{j??sM~isg$Aomtg*paiVm1Bd9c~1g zo8s~KLTaVrE1O{kODsGBg#Ck%`g5idiEF_~P)M|nmUHmpy!zL8{KyexdTf}L4 z$vY=IZqvNz#@HfuyodvEYY0ykRSiBi$b+DeU`g`8992YV2*FktH?HU?UdADp?F&p< zn(|Q#A!{_3=D}`@9#tB2jTK(3gVSC0fcvis65Tn~-R|AKzDqre4aUa7-I-UowH3G2 zw;Kiv#>h=!15~v>lJe!*X%>1ck`sDefOUm%2XGH<`i!MZdOIUo>S-**7yCP+Gg{Lb&;u=9$rIMJt$Mgk@YZyg%>sDs# zwF_E67&R!R)|ArXb7H9?0f~o{rma*`F+7Aq9!$9*EQaCx*!aDuzc;$MWG}5`Uu4mi z;3up^z7Iu6R_NQHhvte7t0x1akiC#gUIq?+3wNWVMNFC1+lEwK`KG)$&oI5rO?rGt zU5R|=xVrSF-qSEQRdL%2?yGSarmf+5Ae08I>Wx+|Qnr|vbnngrnpK`DJJTM?yMFap^-9L2E+dUo}NA%iCUs)29jP&|e-4 z;64up(V|%zOoJ>sm3sYuiCQm#29JR4x}I zP%Q;??97jmm!svb^F)Xz1I)@ z*1?lt=>xtJA?`Tb)y1q>Mo>QffSL3PAS!58s!1gB_s<13FPvGXJD5b)WyF9c9HLbI&!KYbl~iDeC*>yij;}E&CJ4cW;C=0UBtLg+Ss&6Y zq~qf-@H7|r>%JRL6G+R$m~9LwK?RrMJTM_bBEaA9GNE5?0x!)=a<>BbiV!bXgxqK; z%FL6>G-ro{qnWR@3tk?@&v8fIbSGzPOpViUTQ7v}8{^i($T{DA+;U_$1(&Oj*HZ85 zZQAR5!yQZzR*($3bu)&vv~|xn%kKWDVN%F~o+_kn4Q&^fPvqkk8x z2pyDMx0{>qH>?lZ_BnSIC^keF%eRot$C(4j$9G$PU$GyOgB&eLpj)E6gsfY0w(C)| zw`Kin(qJ#XSWQO^vpwfUPAIIfJ<{NQQUE)#9Kft5FRuwMvrqnZ#fyw`ba#Sp6@&}w zXn~U*Mw(cmT2gcUaAYm(MD%n^HLOpEXvDqPrYY$va->i@a3&YDNl!wjOqh_A6|wON 
z{X@qZ<;DHNuJMilsAd#jfI;Rut6#ih>BvTDZl` zsFlUc5AFricj<#50@W$Si<(2`z7z}hER%_XsavXNe)MolX0)|8>KeTyb@~&F)B_8) zh#yHA`js`iI%Z>u@Lh0qt8RrUK1SKxO{-UN6Dnjrv_YrFYQ zPAfMX7J8Nbb{%lYP0x{Q`+Yhtg(-IEY!cc&a``vI#OEAy-}=58WCm`SbIoOYp=(>< znv8W7R>|i$e8LAI2N_Rf)*sSE`Db^6HG!ad_0GzSNF=peZm3CQHN51hKa#l)_L7vV z7VVRbUBF2u7AZW$qh>9ZTSIqoX%upPBFZAi^Kg1UQVcASOu<|lPH}ynBRgKq?xlHj zUY_e#@7y*zlYDHVWk+~_LK)0_-eq|gA8hjcp7OGLVkw%+mj0|hea6#ifA;-+x+6KL zxa$<%7M*FTN2yby5yxD!A|aJ7z#7O9Pfr<~&cy;-;Y@u8K) z-v{A84&v=*eQI!8S$-1Dvh=y&&;;yq>^F0WjG||Bc)AByrK=`=n4YV4Hz7Ts&<5{geKUjlr&F-kZth}X~s&cQ% zC0ok+nz?g0p3k!mo5_!!x3v@MMV*8Xy84sOPVct2!O)1;q%IF{``e;NotFFf@Ze9c zi4Gg5&a2JPg%#g)`ZnsfxxmZKr%7*`<$K@LMMQkCU79jS89drw&h>s*E$OkJnf4gcqn+QupSo$?MV7U>WI3fVRi2pxs%=r~|_G zWL3Pxhl2@oHrH+ERZj+*_nNt8i~UpMh-5F@weV7O6vD-e<5m*h=Y0zg=Tq;}>PO=a z+0bQe-REgRaQsdYJf`y?kNZp9-BeYVgI;ph=^Q%w* z^?CB|MC${MnmqWape8JC%)w-Y&PO`ua z+`B-csXWTZKuh_b_Tu<%(32WeMt8|DSh$&+^-;(VONTR`*s7*K>GP>`f1&cs6)grx>L9 zd}xf<@L|rV5{EioCEcm|thbkyb{%n36srW##T3e;CFRVMtbe-DivuR27$z+VQp@t^ zQ5bH6Phf$y$OoEx-60wu2OnY2LLwOI)(Uik=aFCs3npb{)j7a=`?zYJx+gn(?2IQ( z8z~!`8k@v@qa1zZ$Xci?D#c_#ARb}Wc-uLenJEK-d*~Q$pp@na?(7O1^BT$;{7115 zEVU=*`Mfi`DXyH~wd0@~%L0b3tGrpd<(& zAdLUnSYOY<%)r{rnA)I2Ny{dk7RB>OMU$>X98qJnL)f*Lolt0OHgu*_G117^tOApR z{wMO-(`8)`sHkk6qr59^ttZbU*Q4z-uj_bNfR6;RKt^G9me$b74`s$6-dZ>*fw*w~ zL1OZze*brFWU(GmrCi!4J9o5A&}yHF4L2@l>;kFpar~8rNE9ZQik`0~WK!v3SJVmi+h1{V<**|i zU_~Kg&d#sn-&SEgL+Py%`VTm5k4V>ih3ufw^^ue|tw<;4F7l>aYI{~LNn z_Ldb25;A?Xu$`AGg7Go(WCpf4eo&#fF@pk}(vU)Cd-MWH5sU}9=AW6Ii&ouEz&dHH z50B4v8$IL0!r!dc0IHO=@7gv%cn$_Qp3_&{oVr}ID`$1)faB>uJ39sujA;}^#6pqQ zE{Iy+$>({Ql-r%8dPJRfbt`HBoNQg=3|~d2c&^njFfs($tHu8m$w7)$S+Sb$5#op@ zofz$fm({(BO(w_~et^pBJ!o6G{sAkJ(*>SXnHSEdPjs**FU)R z$80SnWLs$Cf)?%k{O*aXaLqP><3Y%DR;z3oUEMtJ>9brA;`?Oh2*WcPf`9)veuhg~ zQ)xLgz)v!;en7E~64G$(+s%YZP=}^uu;>E)QCQMJ_M zSGz3-kz1roOP8>wZBq5Rm@ss_IJ#Z{w#!(eOp~Lady-Kv?o>u1_rZBJ92qH=(Tfk@ zcO5bLPMURgV8ZM%69@GoVVinT<6z?GDZ_r2N3wNMO4MX2w@$3C#w5;F68RZ$`J1dp%g~XS^rK`fU>e 
z%Xiig3UieXBDXNdOb5g=Rd->tr$Vsv$JuFWJR3Vv7^E@kkUELsh|G|ZV_lx)7N!X^eE$m3u9Y1a9~>+Dmq*lrf<(In7Jv$4nCT~ zB=e66QF2N6qeI8?Ln(hwvPWeh2R!mL3)Gj2v}C49s4Gp{LM5-E!8>PNH0k-iKi}y} z%yBTS>4w$1A+VF9WXXS^=YG%hXNF44i49<-l4oldn&Sykj%Z&LDmU%8rm1&F?=P^Y zH-Hbl-zZ&C6dmoA|K_w*a!hWPW^^27&_hBk1}%R@54n(v5f|!kDn^uuvzezRLa5J2 zBsC0D*_>4nw^lJLoqJY_Np__dY?RYjW)Iba75l2F1)r?9Ryy(Ps*~T8YR#dCr36}! zm|qIDH&-_~=O^r9#KycpUAT7;p7c&%{mupC$DyMKtv$+)A$|lv;io(D6|d49HjI5z>McV>_iei9%>eqb>!+f%X&ndRy0J_A@6 ztjGMB899T9(Tx8+f-A(knuI5c3a(_yFy%+PSVs0o1m_ElzNR(ZjLjKB-JksfOj(jD z1wgP8*vXd|Jz-jzKC%>0idwSCRF*svN%#~n=Jwm*cDuo)z z6qpM@Sf8(H5Xvi`+i)a3$S~6~yuqu=Z1$ZBE#lxe|6HxmHJ?@p6aBt5%XJgNQ&Wlv zv*;j0OehSH?<>v+_x?Rb>ASN;@f-c-U|6^Ea;B^oBJBHG^eDy&U!&7SxnN(0L+@#n z&obEvGyIPBSU=tz1Goyd{D(kyLqJSjgu3U+H$icX6G8(Dq!VxwuLK0oCJXs12vhHw z_&(}qJ%zPtDQVMAk6WpMpTezoD%1+#n9K0E?2wQwjfe8hrkdLQtn1YEZb;TrmTaY1 zKTf(iO0!2qNeAPVs1}rct|4J|ioW%($@h&-KT)o_z#TT9H^sk*Upu+1@$z$k39umZ z0K+)2)`p+yb+xdZC1o(LOSiJF&{0@*fT?@exxpX5WxMD8ihXE2dzA04I^xMX`jkpz zcYzys<_%c$VQqWl45%`gu5dVl+3uIvThw{3IQ@+HwAcnAY|SEZ!#=NR99&C+44`LT2VK^$t2)z*ZvOP}AV1$s4-vm(d@mm?LQ~W@lzBR;q0-7xg^`X7;xZ+?<*Q z79REq#x8Ucm^tX*bUWJJex}nG6PsUz|38^=F#F2QZlq^xOZ^u)*c(_y^*{&E!U|r# zhi$V_o;?oNHZzMZTT@A0e}8kf(2&)1fMTE-7Ys(kjuzNmAu2W=anhRMwE>!si-;8{ z&}w?k2|wFiQOpk|ks}VD?kaVL{y{0W^elh3e-$KJ8^!aPBlLN=o;Or<*)Cx>u%G-} zveP>S?!SEVf4bZGu0lbYU%XCz5y5}r`F|IoG$rY<Ys-^AHn(H)xc#gfIB zONRd+Skw7SoL>fWZsMQTy14QmT$NNrLd>SQX==8Lm0*M#r9Kx3Ve-aijPLiyjlXfm zM3k2L5%CKWu+jbcMMlTgmLkE@@EgCYSdgf-nYgA{a7$Dqx0$$$cZ(LabS%lP1u7r1iFM$bV1Ii8Yg@Y6Rpbp;bY+m92DIfwX1 zB>|5{G|b;dRuz>^g!VP6v9CDiKaR}7@gF0rke8F_qeb+Zs%lQ=gDyG6^*~bWhoX=~ znJ1Q-{IXUPzpz)8Lq8jX9M$)jT-WXq($1=Ej+PZ_xR0rD@b{M``T1C(+oJ8jjbL1H?e1>oC z=-rz5(={w zE1iAi`dLuTL#oOnzw<)@)S|^XWIgOMS zi@^#6BkYC0W>4XZ2>#P7{u_NnF*))7(nmB6q#{tnvI*rna5G98FsPs>K!01GQQXjk zL0_FCe8CC--!WlnrvKlHTzUe!_bVEFBW-Da3kqZ3L$V$9JFCCatt|hPFj@(tCo=wY0lAL50~DL))m)ik{)Y zSXeBd@L1=$a%6&n&xZo@yk@ez-RG{D#38|7FRPZI*PxMGBq%=mD7^TZ4hDT!40Dfe14&!DXI!6@GV72e1p8HW6F7OIZVt) 
zJs>$maj^3RMS{kkt#l=Wel~;d)D=0g`}Q~4Q>Owq{WbZKU-WMQ@E70zI{^MMfd7X< zqN&N@t!gG!gCbV_2ME+OV3NB_7#c_+6wpKwi$VUzQ8HdGBik1@eE&(3RyKzJDoJp9 zdIfU}ru*h*fC2^dO}IGYZwHFKIq}=LdpLc*UfHamLcdC!WMIU7lhgkpgb(Jm2j#Es z;{KBm{_67I2w_5II^=7!v#S=z#1 zu9hqT+TXgqY(=cT|1y@bUy(EMzw6rC<|_f<7n-RZ4D8Kp0al6@Hgmj)Z(d#cc9J@b zPR|jPa;Bh(tHk2Kz~98FAJQ_+x73`Ef{O3D>fD|1!G4o>1Unl$Iqmk;0V)6|%>1x9 zI({>>EQ$4_-`>sk_<8~KT_2z=C0NG`t^BrF<)sd)O}Mj*E3)n)1EVDTiC19C6-i9J z(-rvsd#+`_ZVk}ILw19K8Z)}^Hr6?XuPFV8*HDR`9-!aA8-QN0IpZCdcF?^U2VeTu zKG1KA*mq&{-EczX?JQgaXZ>!rVPjgU{AM2y;d>Bs!ICu=T%0HGII1hakA39+Y$d&* zR5(2)D8YhPFURm`QK?#zr{)OtGsXwXT)}6r7fvdS`Q&3s2e`Vl?l{IWHrK5 z36zkQ-DsDJ+nbLJx-h3wR#uvn2)T{xi5rNq5~=)|Srj)Is&hiQhkNXRLkYGw9q6df zIw!3tL8eQiIvitJ+9TmJxfavreY3shuEmu{IjXZXRqTCf2ig%l&`&~(g_ z6X}0ydtgIcIRLyoK5xT{f|@}?KTh|=yF~X*hRmD?TSnHR&aLckJl!}YRM2jrdl9^W zicsUUF|xhBFiFkrCAzq~Ckoj?q(2}<3VtRL6u;O0TDNKQd$*n<78G@n%%O$7eqHi! zzNF#`As0G0fbr7c3e)*lmxza1l{M~b*;IbHM0Edg*_c^70Q4*^b^a@lrqMswP=lh> zUu&r2NClQJ(bBL9UkMvK(a$Fg_l02I9xAq>;n(EoLwQ3RSx)Ku`FJJH8kO&1FRs2j z!|mZjd#da6uCCQxPh>twz6Ni+*~;?B<|a&tSn3buMEBVF@(rr~D*a-mD5#uD&>wXV zND`y5mYO?M%23#otu_?alIyGSGI;Yd`R!GbYMmMAgdI0P^n1i(axx+?M(R%1 zqZ*{4IR&QC5Iyy?UyeUr!ixOTAoxWRN};0UX-^f*6Y}ZI%yIJv+GSP!v(2w`U2R7_ zA%%IEONfPl$=Pfv@^q!Da)@ipXxj)z~>M!6r`PJ`jsZs^2HPWzchTKluJFbcp#wqHF&}An% zdkbt_m^?BQFX5{)L*9aG*>Vkt)%2kHQLV{MokTs#^<*_xPlG+Sr-E zQ<1vmtYN-b#%=OjC!q$WYn>57x|}>#a=YF(jn#@hSGDB$)l6hB*7%~2So1J~A-xH% zyAa2z$+@XsX==~49Q^COJZ+(^tl3vk2LfHdCpyM7R?Z7&9gxXKR@t&g%k-E$ZO{_{t8+qu-18@xsJr0M z7J880N$oLX4oA0UlOu{dNJ2B?(C1B^sf{GaPf1NU%U@Fj6d({tk3c z3|8XKoq%`>sp*^X4Lp5=Ds3v3pb4ry37$qw#!)vfO#v}Y%y|@vL686un&J-bEGEQ? 
zWfqxQNEv>R%J2R7!E$4^P2eqJQIn(sxKPTI-;5RohjjUmo&m;CiMko6<3YY@pJe{4 za_>wAqfB0ZBnRvjU!{$#tQuKYZZ+~h;}s|v5KU{5)NBZmx&7!uz-U71!=&1JCmzY_3V{POlACQ zm8{)*p)y|u!8_p4yLg-I1!rTkbQYmo8oWYYF%k#$xlvf;Q7;L7ewqY&l%#J$An*a$ zC8rc$n&d{eC~oR5x@BuX=4WX^cpY=j+S z)`qFO*g-}cxGgTHHyA;IevvB$T^rXpqjmms4`kp4F#yM3?rLdP_gagZoU-kk5cjeP zO-KIkC8ZfvC9ZBUT3kjQQQtOST92Zq4~)tze7ff+h$idC{9_F^mzkiDt;OZ86@l+8vy$-o%Os`IktJBOZoB~ zR4Y@FS{d2cjI zHg?E0zAnUYhmBP6^{(f>H@cG$vMCpQuQ|hD`Ma;K6pouzhn?1AWrGd11>m+-LvK;) zW<$Z*I-2z^&!L#OnAXh=P|s8-m*|8BfL2LrBl+9TXE7*P6xAxy;}=)4u3_^; zLK;jIHLC+p@nY@^ME+AO2jAlSZO}z~`0vKq#Xg}1ktWc1AFVg7b7GAceI~e%W3psx zfLrtsx${0Jn6lCzDQ9q*iCWTrt7YKrl-OBLt2tv-%b&w@R!as?y>|X#Njq|3^>p#k z%L4kL)kF-K)#Dyqg{i5t4`VAqV8vfJ9c@%W16gK*nUyxzAAiofxEJo@%*vq}CC1>+ zXE7T%gVo`21^yOcKZs_|uvfT|+O*8O&O#tTgYD?DpX__Yiup7J}nyybOAPP>Lf|L_7DjS86A}!QL<} zt3efc?L7VO;=NGcjSHzxfH!v!llJ-$4nuxEnmDrU?yDBMmBk00h*wyERq(uXX;+8^ zwcXSGPybokA92SVtqI+Y5WTYllu^^t#Oy$tb&+<{#u`guLsO77Y-~HJWPa%w{4je4 zCs~Zrl_kaRyACYx*_JML$xZFrSdW%ID_tA=aB^(4^;HBM3bTvy#ep^h80j|V^j|Mf zolTpjT+1}YoH2<+UkW$z!Ktm%IDQbXx%a(nLU91*PDcJne}9A`JJyeFo=SMbbBo|| zEIQ4NECo~xPPVeZYU`B(C)$fNb)=nVg+6QrcE$#>n;HdQczLI4P$e7!a^fDIE4w)4 zym0*)eCy4T-);WYhOMStXuf@IG?0DSG1C9&q}dx;{$;|b4Q#B9%}f#%d#v+l5j$>w zXy=Ps35GtXn>8fAhm#0_%cz#=60L=b`+8r(GHxS=wXSnATuwYqFtp*gukzH0!?+mh zidRBIgSO%LH<>uucMa-JuMP^I*>n$9Q5hvcn~^%5<&nV-$`uRsGA@Y=O8Ug2A$sF=MFEa&xAt%3*e!2QGyTz`rldP;_lca7y}wi(lXWq?NX3 zM`uPEd_{!NZC>C>BRgL<9mxANIEvO8gSY#x z=L0h`c8DW+l!iI(oQuGJl&Sb@V`gFsm_4JhtofvT1g~#z<6MMD>zB+fKf4;K6~5*Z z)y<;!shVV*4p#|}d!CX1Dfb?}sazB=Hdnv^HCYMGom1Ly6O>b>_Rx(y8bobg(-T4V zDJC5OMmKJZFK2T7$W{GpxRO}*oqtZkTJJ2Q^l24Tp`T;YGq=FK{bSjyh7AB>1M~Hv z0NLX{*7Dn4`}fCff>t5gsL3;kD!;2{(~+5&g+a0m z`6!>pfUM~x3HS77mnqit+Z5?u7Ods9EZm}l+CTnQ4ZKTYSjPYk2&nN3KKTC`L5|ke zM)vD!Rt{^;NFS3rHL;1qPE;dKmkRPEi!0)1CHL}T(rylBBq;%uizp-C^Nofcyf3MN z;&Ma}b|*I^i_+3FKq(*KraF5#s#cdOw@@W4Akto?LRK6d&e^YT&Sfq&w|2bN`$=Xr z!Qj15+i;Nx$hy2?YW}=|>PLQFE%!1_Kmz0a^c^Bp0s1(Cprb{H`-SEW!J!&R=~Tr9 
zuRp?d2~-)6z8V^`zoNl;7M65Cy{(e+i(A9aG>i?9LnrpOlmAT${OcB^A#-}hBQ;I7 z#Z@Ut1FN_T+{ZQiF3BTRcQoIe+?svrRxlq*dYio~MB%p(;)L|bdwN}prf=hR3HG6$ zaCSh&ukv)4(o-DAlWXknl(tje4sE8|?}%`$C2BCqoW|}MLfeD*z`S`&*qWcz%?F4% zI7pBj_I;|@F-i!CB;2&$h6ltwdcuWO_sV*5ShA#~htTql&(8GMODb#;w-+Do<&|8>0pJs zW_pSr!iE8C*RMHsZ^P>%nOnf!icTg^@xEv_R93B}pE%l@b~lJ~I`&+>*2OEZ5O^Ld z@d~@~D=x&4;3WYDD)6B;N4alLYVS9I!%f&Vbn~()(8`47~S(zcz2p)+um_GOXu0RRnjY|Kms?( z>g*jxbbC^tK68`X-m&3>D?RVB38ywlZwCjaRpsSb9|M^~;O=RW&g80d8EUvSs1{4B zwk+&ejCPx7Z0LN3ym@{oX@i@nKV3X^8DM!oWP($0ZbEZTCVpGAdlbG5EJYcLC|XX$47GSmKu`x^|`l zyNMEgw)Ee4GQs}QXxZ{(6vekNo+Fu7$SSDMgxUw`Wrj(cnisLX2j{$(xD>%SWte`+ z;XkH_3Iz)xR0Vs)KNNZK0IwYwy?G2rEJz5s*Hcwe*S@4!vMfFfOWL17@ZwV_kt(kA zktW#=BC@5LwH)odDoMwM^Z4B~{9Z6>0iWTn^WyICsUBm>qULI}q$USW)7qtQlrh)M zOW7G3H(M)r$#Mu2gbnnGJ5b9nsF=~J(;kxcB2$qqKYzt-5%T=BpSKEo&PI&S?vI*? z#z?}k+>y>tns+IFzuhq;XN#hR0mdyle=AluZp0CUUr5$;FWY)QINChh+nNN&h@bSp z2uabTZMtD9Z~|*-%z3V9(5qL<#nLbL6Pl0im5G2R3P?|cQ3kV z9>C2D9$<_=!f9eOER()Z#O>d!R{pwg=r&g)o)<02QLI4iT$dXUY>B@2_Jy?p|%LvB>v z^pC|eEM!Fbt-AkP`s7DUu^Ie{&dBc)!6g=-mID_@=_dc}#e>CT+ovU%%)VQcC=vy0 zo2lLB7QCrFSo_JXsZG}}9~PG&WH$E!u)j%ZV2(|h)&@{&)SgvKfrH9A-F*_%BHg|c z9TG)sc(aF1JTEc76Td}e?wW90?V9}~w*He-nm?^u<$Is6 zok_PvR0)|VTkN)LKa0a2oWARu)jmP=Tn<9KDAVNwxd1C}LQm9I=|S9R$HNxb^+7F# zuu_l?v7!$Go3m+)_*G|<5#LF`9dzEWZ53Bx@q2MBs&~{~Q2Uz%wZOqX2y<&qygi^q zbI81!H{rThjHnSA8F`#H-xpnYcJgBdG6@=7a=ffjH-fn`EIsfcSt;d|azg~cI-7() z5(@>dq-D$YZtn67>yC*1uCbKdP*q~zw?3lXIVFS$3y$P>eTIe$$CF#j> zCG%fn{BW_jKWP$TvAsm|s1kzIYfTGzXgTCNjvqMgwN{j=avmD4)jh0ur8)-=%qjGW z`nxwNo+*_dbb&Ybm{NBR+9jN++R3Ef|>T`bwz8%1=u>7Q@Ypf0qH8QqL^#bv+> z`D!$rveSCFFF4h(kpeml8=ec8uoAwl<2XQCyrmt-9h2>V4kA>QQ|K4mZW3R6(Kq&# z?8xkQzKljd_|z1P+X;{nPMQ*yF2%!$ls;Rzl*zM8t3A%EgQzri)i{`>OjI`dJ_-*Q zltc_tR;Wy$sgIMZOrlAb)Rus+DDOr|x-Hc&ZtkevC%^xx!RSV&x+sAbKKQmMB#;Jd zB(8(qwMFWEXXmH?a?Q#dEHcVtXq!9A=WqbUe18ux9>no9W1BoKtR4; zgz>S7OL^KMHzsb5#l10*f>^z8&*DAq7Om@xKmsT?>V*j__VMiN=K=V= za-*5u_f5fZI?0J&gL{_^Hl8Yw;t(&KliCkdZ98ff>Se4BBr&^G`Hn1HFDY3CJbJ?> 
z{zs*A{yX(5LNM=TBPh0Lb#3P)QEu?2dJ#?5ojZwCs2A@xOo>}5=G~NnODom9Ny~RGnl$Hn}b)HSKV#x8^5G!VAvHh%)^A`u@bdS#x~mdohuQ!n=KE|LfMx=jXYS zgv=T*V&^S|a3u*!HOfmpALJ&<^Qv04~(LR5a%WHS2op(G@nBwr=%Wby=#Di_|iq-eMCY=BlE z`;-#`uWbOM8)&--JP%^Hx2P=tC8+;9NrZ zxpPO4-Du3D`rae7mR}0Gr#p}31n%{UdgKheEMOJy<4u19>QyQFA zC?b$>g{1gy#4uDX?x*cNDT64=>Az5Tmh;O|7Ec7?A z)34Hx5b@9>w-6}w#+ln#vepVNnHmvWW##-pXHEjR)s)c=0_$NOMl1)9^e0T^!^&e+ z)x{Z=g@o(Km51s`ugRD%Y~;GiY50gL@}6-9L<4`eCAw#k(5r7dc!HEH_<3%@w7pk|X;glldJuXCiy4 zQ$bl5>mq7K56=z!SLhf9ih&1CqgRR1xGIt4{uM{m@YFeE+F`1}N?HxeziwzK>YTRC ze~mWg>*VP_Z_5AI>9u35)Yn)M10S<`w-@K(LN^D{<#To8KVnk-=<=PL{7L2i6q;?e zKT2^h++h=UQTPtMJJL3OuVuerc>is2Xp?mg{tHmnjLav3QR!z68(_akuB@&w_7i46 zG}n|$2?aR^Jqce!1gUh7%rvjf!xXDk2mXjt=f@H$d+`qJ3z^tzZ;dd&NrVput0d7i zcVq;o1!?D%jd=ESb`q{#6-F@l(Y%VNNYJG?cX+ez*N#=Sife!=8FivWb?nCo&V#Y!pVwS&)&pADppDc zm<}=M@|9xnOvnJW=Lu;~$S)j|4{CcT63|vReRh#huK-E#=3L}5eK+g0`RK|q#~8@6 zK)4L;63E-wPn_pcw>9KXh7cUQd01`T3i;V-KOQE+`GiG(h}yF{w~GgV}< zD~zXi<}qT_&NnPqDL-kM@S3Z*o7v%yS{N?(kS;kbrQsbu6qxziD07VIVp98xZa^kC zW>uK5eK<~nYRR;15i)1*;uUngZe&3`!;Enmx`zk9^b80SdoTE6qZzLp6g2s|-wnqs zJy>w*-EVGhFeie-%*UOVKi&jXN>I7esDF0n>q&4j;QQE9rAS?og&$1umph=kky zpT^ES5bEs<;A1O0MUe<)30d;=NRg!ckdmbA>lh8bwOuQgQDBJ^B)_& za42bZbg8XXF%A7XsO=_x@cR8w!Q*GCr%F56UGu6%dqpg%r%43U6{32@_oXCto=%^) z4P5cfv}~-VhmD)kxeNEV+^*D7^|+fGX|~bLz>T?H{8z<94orG;ST*!@$Asq?&mKChzoo*XCKkdqB zqME!WR$5b1l*HvKEo4F^)gKDpJA9^aYnu0k&%A2ypOZ>NGKNb2m;RyWd%LE`jqw2E zXO&-g3Tea3uLaf>WKVLwjT*2YUROS_J1Z!vExdD&tKmJ%Y+X6en|g=C{u)!P_i4O# z>zJ{e5=VHdUAd*S^nG86{?K=$iEMixthmuS$r zW)I4$d59$uyIT~DD(uOTWgRa14n095afiJ_d)bpETf0B($WN9mRv!)D(K8iRc}ddx z{b+di^1-tePtLe(x%NId?1kUfr{wJ5h@Hxx4!+47uoGORQ{ddB{dm_?nE~Hb!ZsQ% zwKvI~e*7065Q7dp%7`M+a7`XGzv?Ft+1{MT>{L}z3LjgIeKH<@mm zav){fUtwBQ_qLN|JLIT4Z&-C)=@T}8CdtmJSyEE+a9U$JXcGTrcSmuuSH4=N_LG$x zDBr(ajvl<3uf95bx2Dt;>7kW{6H(Gm_RP(IQMrB-Hvk~e0qjI)whp{ zp9=ml`H}xt>QSp9#qss^dXGK7h9|X?71r4%bz7ak*tydh&jknzfO$oL}^##YDY-qci+<32DJXo}3eC&PQ#!H!IZTijn zu4l!*XJ3^!^BQkG$EC|c&i9Wz^`6t(Q2ElU%2xKSdj?;k7xBKf 
zqprYyIf3tDGWWqXl@I)*EIvdCTRqqB7}QI!=&Q*5ZB?yq{P5@I$-rql)(<-FY&@Ne8BW?S1Z+afRL zU<#aj1ddP)aD*ebyI$Cf1i3eD>u)}Bb~~Y1CHqakvxU_1Q4zJC6G?i$Tr#yax_T7d zzwZ0WCZr9;+_-#EJ|(d=L(7@uvt9Ibi{GwC(_hEd^7jOL-c$d0Ubm&lU3a4hdC@d^yZL>%b5LB4 z8dm8~C9T0N)7p1dDMY(8o5wb$mmj0`$7%f-p5{2oJoDn!?;1uNUe^zFCx7QzY)-|$ zOSotY%!Z4=_l=<=rw`eW?)sJwP6+6J%|m%rR6%LG2NMOee$D0A>qO3mhiSLvnKltZT%zY1($$gHGAiF&B^lkwB8Q(uC@Jd&-}KxTUcgG>i*Q} zAXXa{7S#_E>eqzEG?HWOQ(O|c0(FFc{CzB;tCl_Xmxp(4jf7L;jBQ@83=(`AA&?h? zZ8b0Zs9R+Bpfxl{b~-NQjAr%bvkvIE#&Z!N|-ZrVx7Z9b72;X86zi_dU)=ebA+b-v=eN&8GE zd1RVbeG~e?7x}q-dL~SvzW0jj9-}udLh0Xb=eix-i(79c79i{Wb@-h}%OCpH9hY;2 zK3miFQgS}aFCG=^aK}+&Fv00wnhXs(`l!s?xQhb|Ra6vf=R7nw?Ne4XQ8v$sz17^E zqheIATxGP$h@(ar$9gGS&v!PqQ0hdQ>$FEyMXU84DKolPCJU=DMi@MJX?Qg_Lxc($ z>+D1Z-y@9c0NZFmp?{yu)xkRBy%toQix-(v7)Z+Vs~`&K~wuV=f0;9N-ek@8b&xc zIeWReIJgiAB)lhJF4t z4v`A{3Z6g*2Dnl@nK$-O-k=vRSm`uC`cC?mB~m6iSx|(W^M7TasmGFX%_5aS&+VN{ z6o9~X38TS$z3}L&@l4yQcmfP(50FX1+M8dz!PI;+jGcBO7F+erIOKm|R(@>a13TZYD z676*vd(&Mx0qBLE8azQirlO!oMDRhv^liEMouz*+c=u-U0=K0Fl3_5#rD#AhXs@%c zo0|`F>?h77<#f$YVc#`waA74=lHIixDYG;OFCoIu|zAD(#5? 
zbD<8*1$wd)q#XQ#g7b05dU~RQsyr=uOTb(g2Lrmei7QxHm#!K*4E6k-qX)K|@jS zp6CsP6F?)NC?gcmKk1+m!(yq)4!V9pxoXWiw>iwL3Vqo^%9Hsj^ZiLrsWflyqbF5D zD)%p~{7;@67Z?L4$U!hTyPsksGW>%9FMJpTXJtV^+Z-5!=Gia}&Z&aXYM0U$WLSYY za54deBZgff)jWrhP;fE;1eUoJwj?zGEE*i9kI-z~mWYPX7zqc5)+4xYOmOpo_ONhp zI6FdX#4#4m;b(9=OoQXN5t=NX0qtj0H%x+Ki4jtt2NM#bPJ^R@5gh3#1KffbVOTgg zJ{O^Bcru}(OeQ#D79sUBBQa_w9PWzXLJ156`x*QSHxZ6vMM%TUNQ?x7<4+OXX)nft zA4Y6Lz_V*vlTXAP6!omGSspU8xS X3=an|vN0G#@V5?F?&3*cw-WPzqG|>& literal 0 HcmV?d00001 diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar.config b/rabbitmq-server/deps/sockjs/rebar.config similarity index 53% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar.config rename to rabbitmq-server/deps/sockjs/rebar.config index 2aa5473..c240d9e 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/rebar.config +++ b/rabbitmq-server/deps/sockjs/rebar.config @@ -4,13 +4,12 @@ {erl_opts, [ %% fail_on_warning, - bin_opt_info, - warn_missing_spec, + %bin_opt_info, + %warn_missing_spec, debug_info, warn_export_all ]}. {deps, [ - {cowboy, ".*", - {git, "git://github.com/extend/cowboy.git", "4fb2a6face6e7d6ff1dd34a02c3bd8b63d972624"}} + {cowboy, "1.0.3",{git, "https://github.com/ninenines/cowboy.git", {tag, "1.0.3"}}} ]}. 
diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/mochijson2_fork.erl b/rabbitmq-server/deps/sockjs/src/mochijson2_fork.erl similarity index 100% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/mochijson2_fork.erl rename to rabbitmq-server/deps/sockjs/src/mochijson2_fork.erl diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/mochinum_fork.erl b/rabbitmq-server/deps/sockjs/src/mochinum_fork.erl similarity index 100% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/mochinum_fork.erl rename to rabbitmq-server/deps/sockjs/src/mochinum_fork.erl diff --git a/rabbitmq-server/deps/sockjs/src/sockjs.app.src b/rabbitmq-server/deps/sockjs/src/sockjs.app.src new file mode 100644 index 0000000..5c309cd --- /dev/null +++ b/rabbitmq-server/deps/sockjs/src/sockjs.app.src @@ -0,0 +1,7 @@ +{application,sockjs, + [{description,"SockJS"}, + {vsn,"0.3.4"}, + {modules,[]}, + {registered,[]}, + {applications,[kernel,stdlib,xmerl]}, + {mod,{sockjs_app,[]}}]}. diff --git a/rabbitmq-server/deps/sockjs/src/sockjs.erl b/rabbitmq-server/deps/sockjs/src/sockjs.erl new file mode 100644 index 0000000..a8bf0e4 --- /dev/null +++ b/rabbitmq-server/deps/sockjs/src/sockjs.erl @@ -0,0 +1,38 @@ +-module(sockjs). + +-export([send/2, close/1, close/3, info/1]). +-export([to_session/1, to_channel/2]). + +%% Send data over a connection/channel. +-spec send(iodata(), sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok. +send(Data, Conn = {sockjs_session, _}) -> + sockjs_session:send(Data, Conn); +send(Data, Channel = {sockjs_multiplex_channel, _, _}) -> + sockjs_multiplex_channel:send(Data, Channel). + +%% Initiate a close of a connection/channel. +-spec close(sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok. +close(Conn) -> + close(1000, "Normal closure", Conn). 
+ +-spec close(non_neg_integer(), string(), sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> ok. +close(Code, Reason, Conn = {sockjs_session, _}) -> + sockjs_session:close(Code, Reason, Conn); +close(Code, Reason, Channel = {sockjs_multiplex_channel, _, _}) -> + sockjs_multiplex_channel:close(Code, Reason, Channel). + +-spec info(sockjs_session:conn() | sockjs_multiplex_channel:channel()) -> [{atom(), any()}]. +info(Conn = {sockjs_session, _}) -> + sockjs_session:info(Conn); +info(Channel = {sockjs_multiplex_channel, _, _}) -> + sockjs_multiplex_channel:info(Channel). + +%% Get the backend connection of a channel. +-spec to_session(sockjs_multiplex_channel:channel()) -> sockjs_session:conn(). +to_session({sockjs_multiplex_channel, Conn, _}) -> + Conn. + +%% Create a channel from a connection. +-spec to_channel(sockjs_session:conn(), sockjs_multiplex_channel:topic()) -> sockjs_multiplex_channel:channel(). +to_channel(Conn = {sockjs_session, _}, Topic) -> + {sockjs_multiplex_channel, Conn, Topic}. \ No newline at end of file diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_action.erl b/rabbitmq-server/deps/sockjs/src/sockjs_action.erl similarity index 90% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_action.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_action.erl index 4310963..c65b012 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_action.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_action.erl @@ -45,21 +45,21 @@ %% -------------------------------------------------------------------------- -%% -spec welcome_screen(req(), headers(), service()) -> req(). +-spec welcome_screen(req(), headers(), service()) -> req(). welcome_screen(Req, Headers, _Service) -> H = [{"Content-Type", "text/plain; charset=UTF-8"}], sockjs_http:reply(200, H ++ Headers, "Welcome to SockJS!\n", Req). 
-%% -spec options(req(), headers(), service()) -> req(). +-spec options(req(), headers(), service()) -> req(). options(Req, Headers, _Service) -> sockjs_http:reply(204, Headers, "", Req). -%% -spec iframe(req(), headers(), service()) -> req(). +-spec iframe(req(), headers(), service()) -> req(). iframe(Req, Headers, #service{sockjs_url = SockjsUrl}) -> IFrame = io_lib:format(?IFRAME, [SockjsUrl]), MD5 = "\"" ++ binary_to_list(base64:encode(erlang:md5(IFrame))) ++ "\"", - {H, Req2} = sockjs_http:header('If-None-Match', Req), + {H, Req2} = sockjs_http:header('if-none-match', Req), case H of MD5 -> sockjs_http:reply(304, Headers, "", Req2); _ -> sockjs_http:reply( @@ -68,7 +68,7 @@ iframe(Req, Headers, #service{sockjs_url = SockjsUrl}) -> end. -%% -spec info_test(req(), headers(), service()) -> req(). +-spec info_test(req(), headers(), service()) -> req(). info_test(Req, Headers, #service{websocket = Websocket, cookie_needed = CookieNeeded}) -> I = [{websocket, Websocket}, @@ -81,12 +81,12 @@ info_test(Req, Headers, #service{websocket = Websocket, %% -------------------------------------------------------------------------- -%% -spec xhr_polling(req(), headers(), service(), session()) -> req(). +-spec xhr_polling(req(), headers(), service(), session()) -> req(). xhr_polling(Req, Headers, Service, Session) -> Req1 = chunk_start(Req, Headers), reply_loop(Req1, Session, 1, fun fmt_xhr/1, Service). -%% -spec xhr_streaming(req(), headers(), service(), session()) -> req(). +-spec xhr_streaming(req(), headers(), service(), session()) -> req(). xhr_streaming(Req, Headers, Service = #service{response_limit = ResponseLimit}, Session) -> Req1 = chunk_start(Req, Headers), @@ -96,7 +96,7 @@ xhr_streaming(Req, Headers, Service = #service{response_limit = ResponseLimit}, fun fmt_xhr/1), reply_loop(Req2, Session, ResponseLimit, fun fmt_xhr/1, Service). -%% -spec eventsource(req(), headers(), service(), session()) -> req(). 
+-spec eventsource(req(), headers(), service(), session()) -> req(). eventsource(Req, Headers, Service = #service{response_limit = ResponseLimit}, SessionId) -> Req1 = chunk_start(Req, Headers, "text/event-stream; charset=UTF-8"), @@ -104,7 +104,7 @@ eventsource(Req, Headers, Service = #service{response_limit = ResponseLimit}, reply_loop(Req2, SessionId, ResponseLimit, fun fmt_eventsource/1, Service). -%% -spec htmlfile(req(), headers(), service(), session()) -> req(). +-spec htmlfile(req(), headers(), service(), session()) -> req(). htmlfile(Req, Headers, Service = #service{response_limit = ResponseLimit}, SessionId) -> S = fun (Req1, CB) -> @@ -119,7 +119,7 @@ htmlfile(Req, Headers, Service = #service{response_limit = ResponseLimit}, end, verify_callback(Req, S). -%% -spec jsonp(req(), headers(), service(), session()) -> req(). +-spec jsonp(req(), headers(), service(), session()) -> req(). jsonp(Req, Headers, Service, SessionId) -> S = fun (Req1, CB) -> Req2 = chunk_start(Req1, Headers), @@ -139,7 +139,7 @@ verify_callback(Req, Success) -> %% -------------------------------------------------------------------------- -%% -spec xhr_send(req(), headers(), service(), session()) -> req(). +-spec xhr_send(req(), headers(), service(), session()) -> req(). xhr_send(Req, Headers, _Service, Session) -> {Body, Req1} = sockjs_http:body(Req), case handle_recv(Req1, Body, Session) of @@ -150,7 +150,7 @@ xhr_send(Req, Headers, _Service, Session) -> sockjs_http:reply(204, H ++ Headers, "", Req1) end. -%% -spec jsonp_send(req(), headers(), service(), session()) -> req(). +-spec jsonp_send(req(), headers(), service(), session()) -> req(). jsonp_send(Req, Headers, _Service, Session) -> {Body, Req1} = sockjs_http:body_qs(Req), case handle_recv(Req1, Body, Session) of @@ -236,21 +236,21 @@ chunk_end(Req) -> sockjs_http:chunk_end(Req). chunk_end(Req, Body, Fmt) -> Req1 = chunk(Req, Body, Fmt), chunk_end(Req1). -%% -spec fmt_xhr(iodata()) -> iodata(). 
+-spec fmt_xhr(iodata()) -> iodata(). fmt_xhr(Body) -> [Body, "\n"]. -%% -spec fmt_eventsource(iodata()) -> iodata(). +-spec fmt_eventsource(iodata()) -> iodata(). fmt_eventsource(Body) -> Escaped = sockjs_util:url_escape(binary_to_list(iolist_to_binary(Body)), "%\r\n\0"), %% $% must be first! [<<"data: ">>, Escaped, <<"\r\n\r\n">>]. -%% -spec fmt_htmlfile(iodata()) -> iodata(). +-spec fmt_htmlfile(iodata()) -> iodata(). fmt_htmlfile(Body) -> Double = sockjs_json:encode(iolist_to_binary(Body)), [<<"\r\n">>]. -%% -spec fmt_jsonp(iodata(), iodata()) -> iodata(). +-spec fmt_jsonp(iodata(), iodata()) -> iodata(). fmt_jsonp(Body, Callback) -> %% Yes, JSONed twice, there isn't a a better way, we must pass %% a string back, and the script, will be evaled() by the @@ -259,7 +259,7 @@ fmt_jsonp(Body, Callback) -> %% -------------------------------------------------------------------------- -%% -spec websocket(req(), headers(), service()) -> req(). +-spec websocket(req(), headers(), service()) -> req(). websocket(Req, Headers, Service) -> {_Any, Req1, {R1, R2}} = sockjs_handler:is_valid_ws(Service, Req), case {R1, R2} of @@ -274,6 +274,6 @@ websocket(Req, Headers, Service) -> "This WebSocket request can't be handled.", Req1) end. -%% -spec rawwebsocket(req(), headers(), service()) -> req(). +-spec rawwebsocket(req(), headers(), service()) -> req(). rawwebsocket(Req, Headers, Service) -> websocket(Req, Headers, Service). diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_app.erl b/rabbitmq-server/deps/sockjs/src/sockjs_app.erl similarity index 76% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_app.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_app.erl index 54aceb6..1b8e77c 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_app.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_app.erl @@ -4,11 +4,11 @@ -export([start/2, stop/1]). 
-%% -spec start(_, _) -> {ok, pid()}. +-spec start(_, _) -> {ok, pid()}. start(_StartType, _StartArgs) -> sockjs_session:init(), sockjs_session_sup:start_link(). -%% -spec stop(_) -> ok. +-spec stop(_) -> ok. stop(_State) -> ok. diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_cowboy_handler.erl b/rabbitmq-server/deps/sockjs/src/sockjs_cowboy_handler.erl similarity index 91% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_cowboy_handler.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_cowboy_handler.erl index d2f05ae..c66c9d4 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_cowboy_handler.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_cowboy_handler.erl @@ -1,9 +1,9 @@ -module(sockjs_cowboy_handler). -behaviour(cowboy_http_handler). --behaviour(cowboy_http_websocket_handler). +-behaviour(cowboy_websocket_handler). %% Cowboy http callbacks --export([init/3, handle/2, terminate/2]). +-export([init/3, handle/2, terminate/3]). %% Cowboy ws callbacks -export([websocket_init/3, websocket_handle/3, @@ -16,7 +16,7 @@ init({_Any, http}, Req, Service) -> case sockjs_handler:is_valid_ws(Service, {cowboy, Req}) of {true, {cowboy, _Req1}, _Reason} -> - {upgrade, protocol, cowboy_http_websocket}; + {upgrade, protocol, cowboy_websocket}; {false, {cowboy, Req1}, _Reason} -> {ok, Req1, Service} end. @@ -25,7 +25,7 @@ handle(Req, Service) -> {cowboy, Req3} = sockjs_handler:handle_req(Service, {cowboy, Req}), {ok, Req3, Service}. -terminate(_Req, _Service) -> +terminate(_Reason, _Req, _Service) -> ok. 
%% -------------------------------------------------------------------------- @@ -33,20 +33,20 @@ terminate(_Req, _Service) -> websocket_init(_TransportName, Req, Service = #service{logger = Logger, subproto_pref = SubProtocolPref}) -> - Req3 = case cowboy_http_req:header(<<"Sec-Websocket-Protocol">>, Req) of + Req3 = case cowboy_req:header(<<"Sec-Websocket-Protocol">>, Req) of {undefined, Req1} -> Req1; {SubProtocols, Req1} -> SelectedSubProtocol = choose_subprotocol_bin(SubProtocols, SubProtocolPref), - {ok, Req2} = cowboy_http_req:set_resp_header( + {ok, Req2} = cowboy_req:set_resp_header( <<"Sec-Websocket-Protocol">>, SelectedSubProtocol, Req1), Req2 end, Req4 = Logger(Service, {cowboy, Req3}, websocket), - + Service1 = Service#service{disconnect_delay = 5*60*1000}, {Info, Req5} = sockjs_handler:extract_info(Req4), @@ -59,7 +59,7 @@ websocket_init(_TransportName, Req, end, self() ! go, {ok, Req7, {RawWebsocket, SessionPid}}. - + websocket_handle({text, Data}, Req, {RawWebsocket, SessionPid} = S) -> case sockjs_ws_handler:received(RawWebsocket, SessionPid, Data) of ok -> {ok, Req, S}; diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_filters.erl b/rabbitmq-server/deps/sockjs/src/sockjs_filters.erl similarity index 79% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_filters.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_filters.erl index fba43cc..c4176bd 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_filters.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_filters.erl @@ -1,15 +1,15 @@ -module(sockjs_filters). --include("sockjs_internal.hrl"). - -export([cache_for/2, h_sid/2, h_no_cache/2, xhr_cors/2, xhr_options_post/2, xhr_options_get/2]). +-include("sockjs_internal.hrl"). + -define(YEAR, 365 * 24 * 60 * 60). 
%% -------------------------------------------------------------------------- -%% -spec cache_for(req(), headers()) -> {headers(), req()}. +-spec cache_for(req(), headers()) -> {headers(), req()}. cache_for(Req, Headers) -> Expires = calendar:gregorian_seconds_to_datetime( calendar:datetime_to_gregorian_seconds( @@ -18,7 +18,7 @@ cache_for(Req, Headers) -> {"Expires", httpd_util:rfc1123_date(Expires)}], {H ++ Headers, Req}. -%% -spec h_sid(req(), headers()) -> {headers(), req()}. +-spec h_sid(req(), headers()) -> {headers(), req()}. h_sid(Req, Headers) -> %% Some load balancers do sticky sessions, but only if there is %% a JSESSIONID cookie. If this cookie isn't yet set, we shall @@ -31,21 +31,21 @@ h_sid(Req, Headers) -> end, {H ++ Headers, Req2}. -%% -spec h_no_cache(req(), headers()) -> {headers(), req()}. +-spec h_no_cache(req(), headers()) -> {headers(), req()}. h_no_cache(Req, Headers) -> H = [{"Cache-Control", "no-store, no-cache, must-revalidate, max-age=0"}], {H ++ Headers, Req}. -%% -spec xhr_cors(req(), headers()) -> {headers(), req()}. +-spec xhr_cors(req(), headers()) -> {headers(), req()}. xhr_cors(Req, Headers) -> - {OriginH, Req1} = sockjs_http:header('Origin', Req), + {OriginH, Req1} = sockjs_http:header('origin', Req), Origin = case OriginH of "null" -> "*"; undefined -> "*"; O -> O end, {HeadersH, Req2} = sockjs_http:header( - 'Access-Control-Request-Headers', Req1), + 'access-control-request-headers', Req1), AllowHeaders = case HeadersH of undefined -> []; V -> [{"Access-Control-Allow-Headers", V}] @@ -54,15 +54,15 @@ xhr_cors(Req, Headers) -> {"Access-Control-Allow-Credentials", "true"}], {H ++ AllowHeaders ++ Headers, Req2}. -%% -spec xhr_options_post(req(), headers()) -> {headers(), req()}. +-spec xhr_options_post(req(), headers()) -> {headers(), req()}. xhr_options_post(Req, Headers) -> xhr_options(Req, Headers, ["OPTIONS", "POST"]). -%% -spec xhr_options_get(req(), headers()) -> {headers(), req()}. 
+-spec xhr_options_get(req(), headers()) -> {headers(), req()}. xhr_options_get(Req, Headers) -> xhr_options(Req, Headers, ["OPTIONS", "GET"]). -%% -spec xhr_options(req(), headers(), list(string())) -> {headers(), req()}. +-spec xhr_options(req(), headers(), list(string())) -> {headers(), req()}. xhr_options(Req, Headers, Methods) -> H = [{"Access-Control-Allow-Methods", string:join(Methods, ", ")}, {"Access-Control-Max-Age", integer_to_list(?YEAR)}], diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_handler.erl b/rabbitmq-server/deps/sockjs/src/sockjs_handler.erl similarity index 86% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_handler.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_handler.erl index 81d4ef7..ea8b916 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_handler.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_handler.erl @@ -7,11 +7,11 @@ -include("sockjs_internal.hrl"). --define(SOCKJS_URL, "http://cdn.sockjs.org/sockjs-0.2.js"). +-define(SOCKJS_URL, "//cdn.jsdelivr.net/sockjs/1.0.3/sockjs.min.js"). %% -------------------------------------------------------------------------- -%% -spec init_state(binary(), callback(), any(), list(tuple())) -> service(). +-spec init_state(binary(), callback(), any(), list(tuple())) -> service(). init_state(Prefix, Callback, State, Options) -> #service{prefix = binary_to_list(Prefix), callback = Callback, @@ -36,7 +36,7 @@ init_state(Prefix, Callback, State, Options) -> %% -------------------------------------------------------------------------- -%% -spec is_valid_ws(service(), req()) -> {boolean(), req(), tuple()}. +-spec is_valid_ws(service(), req()) -> {boolean(), req(), tuple()}. is_valid_ws(Service, Req) -> case get_action(Service, Req) of {{match, WS}, Req1} when WS =:= websocket orelse @@ -46,14 +46,14 @@ is_valid_ws(Service, Req) -> {false, Req1, {}} end. 
-%% -spec valid_ws_request(service(), req()) -> {boolean(), req(), tuple()}. +-spec valid_ws_request(service(), req()) -> {boolean(), req(), tuple()}. valid_ws_request(_Service, Req) -> {R1, Req1} = valid_ws_upgrade(Req), {R2, Req2} = valid_ws_connection(Req1), {R1 and R2, Req2, {R1, R2}}. valid_ws_upgrade(Req) -> - case sockjs_http:header('Upgrade', Req) of + case sockjs_http:header('upgrade', Req) of {undefined, Req2} -> {false, Req2}; {V, Req2} -> @@ -66,7 +66,7 @@ valid_ws_upgrade(Req) -> end. valid_ws_connection(Req) -> - case sockjs_http:header('Connection', Req) of + case sockjs_http:header('connection', Req) of {undefined, Req2} -> {false, Req2}; {V, Req2} -> @@ -75,7 +75,7 @@ valid_ws_connection(Req) -> {lists:member("upgrade", Vs), Req2} end. -%% -spec get_action(service(), req()) -> {nomatch | {match, atom()}, req()}. +-spec get_action(service(), req()) -> {nomatch | {match, atom()}, req()}. get_action(Service, Req) -> {Dispatch, Req1} = dispatch_req(Service, Req), case Dispatch of @@ -95,20 +95,20 @@ strip_prefix(LongPath, Prefix) -> end. -%% -type(dispatch_result() :: -%% nomatch | -%% {match, {send | recv | none , atom(), -%% server(), session(), list(atom())}} | -%% {bad_method, list(atom())}). +-type(dispatch_result() :: + nomatch | + {match, {send | recv | none , atom(), + server(), session(), list(atom())}} | + {bad_method, list(atom())}). -%% -spec dispatch_req(service(), req()) -> {dispatch_result(), req()}. +-spec dispatch_req(service(), req()) -> {dispatch_result(), req()}. dispatch_req(#service{prefix = Prefix}, Req) -> {Method, Req1} = sockjs_http:method(Req), {LongPath, Req2} = sockjs_http:path(Req1), {ok, PathRemainder} = strip_prefix(LongPath, Prefix), {dispatch(Method, PathRemainder), Req2}. -%% -spec dispatch(atom(), nonempty_string()) -> dispatch_result(). +-spec dispatch(atom(), nonempty_string()) -> dispatch_result(). 
dispatch(Method, Path) -> lists:foldl( fun ({Match, MethodFilters}, nomatch) -> @@ -165,7 +165,7 @@ re(Path, S) -> %% -------------------------------------------------------------------------- -%% -spec handle_req(service(), req()) -> req(). +-spec handle_req(service(), req()) -> req(). handle_req(Service = #service{logger = Logger}, Req) -> Req0 = Logger(Service, Req, http), @@ -204,14 +204,14 @@ handle({match, {Type, Action, _Server, Session, Filters}}, Service, Req) -> %% -------------------------------------------------------------------------- -%% -spec default_logger(service(), req(), websocket | http) -> req(). +-spec default_logger(service(), req(), websocket | http) -> req(). default_logger(_Service, Req, _Type) -> {LongPath, Req1} = sockjs_http:path(Req), {Method, Req2} = sockjs_http:method(Req1), io:format("~s ~s~n", [Method, LongPath]), Req2. -%% -spec extract_info(req()) -> {info(), req()}. +-spec extract_info(req()) -> {info(), req()}. extract_info(Req) -> {Peer, Req0} = sockjs_http:peername(Req), {Sock, Req1} = sockjs_http:sockname(Req0), @@ -222,9 +222,12 @@ extract_info(Req) -> {V, R1} -> {[{H, V} | Acc], R1} end end, {[], Req2}, - ['Referer', 'X-Client-Ip', 'X-Forwarded-For', - 'X-Cluster-Client-Ip', 'Via', 'X-Real-Ip']), + ['referer', 'x-client-ip', 'x-forwarded-for', + 'x-cluster-client-ip', 'via', 'x-real-ip']), + %% RabbitMQ-Management needs the socket to figure out if it is SSL/TLS. + Socket = cowboy_req:get(socket, element(2, Req3)), {[{peername, Peer}, {sockname, Sock}, {path, Path}, - {headers, Headers}], Req3}. + {headers, Headers}, + {socket, Socket}], Req3}. diff --git a/rabbitmq-server/deps/sockjs/src/sockjs_http.erl b/rabbitmq-server/deps/sockjs/src/sockjs_http.erl new file mode 100644 index 0000000..828247d --- /dev/null +++ b/rabbitmq-server/deps/sockjs/src/sockjs_http.erl @@ -0,0 +1,144 @@ +-module(sockjs_http). + +-export([path/1, method/1, body/1, body_qs/1, header/2, jsessionid/1, + callback/1, peername/1, sockname/1]). 
+-export([reply/4, chunk_start/3, chunk/2, chunk_end/1]). +-export([hook_tcp_close/1, unhook_tcp_close/1, abruptly_kill/1]). +-include("sockjs_internal.hrl"). + +%% -------------------------------------------------------------------------- + +-spec path(req()) -> {string(), req()}. +path({cowboy, Req}) -> {Path, Req1} = cowboy_req:path(Req), + {binary_to_list(Path), {cowboy, Req1}}. + +-spec method(req()) -> {atom(), req()}. +method({cowboy, Req}) -> {Method, Req1} = cowboy_req:method(Req), + {method_atom(Method), {cowboy, Req1}}. + +-spec method_atom(binary() | atom()) -> atom(). +method_atom(<<"GET">>) -> 'GET'; +method_atom(<<"PUT">>) -> 'PUT'; +method_atom(<<"POST">>) -> 'POST'; +method_atom(<<"DELETE">>) -> 'DELETE'; +method_atom(<<"OPTIONS">>) -> 'OPTIONS'; +method_atom(<<"PATCH">>) -> 'PATCH'; +method_atom(<<"HEAD">>) -> 'HEAD'; +method_atom('GET') -> 'GET'; +method_atom('PUT') -> 'PUT'; +method_atom('POST') -> 'POST'; +method_atom('DELETE') -> 'DELETE'; +method_atom('OPTIONS') -> 'OPTIONS'; +method_atom('PATCH') -> 'PATCH'; +method_atom('HEAD') -> 'HEAD'. + +-spec body(req()) -> {binary(), req()}. +body({cowboy, Req}) -> {ok, Body, Req1} = cowboy_req:body(Req), + {Body, {cowboy, Req1}}. + +-spec body_qs(req()) -> {binary(), req()}. +body_qs(Req) -> + {H, Req1} = header('content-type', Req), + case H of + H when H =:= "text/plain" orelse H =:= "" -> + body(Req1); + _ -> + %% By default assume application/x-www-form-urlencoded + body_qs2(Req1) + end. +body_qs2({cowboy, Req}) -> + {ok, BodyQS, Req1} = cowboy_req:body_qs(Req), + case proplists:get_value(<<"d">>, BodyQS) of + undefined -> + {<<>>, {cowboy, Req1}}; + V -> + {V, {cowboy, Req1}} + end. + +-spec header(atom(), req()) -> {nonempty_string() | undefined, req()}. 
+header(K, {cowboy, Req})-> + {H, Req2} = cowboy_req:header(K, Req), + {V, Req3} = case H of + undefined -> + cowboy_req:header(atom_to_binary(K, utf8), Req2); + _ -> {H, Req2} + end, + case V of + undefined -> {undefined, {cowboy, Req3}}; + _ -> {binary_to_list(V), {cowboy, Req3}} + end. + +-spec jsessionid(req()) -> {nonempty_string() | undefined, req()}. +jsessionid({cowboy, Req}) -> + {C, Req2} = cowboy_req:cookie(<<"JSESSIONID">>, Req), + case C of + _ when is_binary(C) -> + {binary_to_list(C), {cowboy, Req2}}; + undefined -> + {undefined, {cowboy, Req2}} + end. + +-spec callback(req()) -> {nonempty_string() | undefined, req()}. +callback({cowboy, Req}) -> + {CB, Req1} = cowboy_req:qs_val(<<"c">>, Req), + case CB of + undefined -> {undefined, {cowboy, Req1}}; + _ -> {binary_to_list(CB), {cowboy, Req1}} + end. + +-spec peername(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}. +peername({cowboy, Req}) -> + {P, Req1} = cowboy_req:peer(Req), + {P, {cowboy, Req1}}. + +-spec sockname(req()) -> {{inet:ip_address(), non_neg_integer()}, req()}. +sockname({cowboy, Req} = R) -> + [Socket, Transport] = cowboy_req:get([socket, transport], Req), + {ok, SockName} = Transport:sockname(Socket), + {SockName, R}. + +%% -------------------------------------------------------------------------- + +-spec reply(non_neg_integer(), headers(), iodata(), req()) -> req(). +reply(Code, Headers, Body, {cowboy, Req}) -> + Body1 = iolist_to_binary(Body), + {ok, Req1} = cowboy_req:reply(Code, enbinary(Headers), Body1, Req), + {cowboy, Req1}. + +-spec chunk_start(non_neg_integer(), headers(), req()) -> req(). +chunk_start(Code, Headers, {cowboy, Req}) -> + {ok, Req1} = cowboy_req:chunked_reply(Code, enbinary(Headers), Req), + {cowboy, Req1}. + +-spec chunk(iodata(), req()) -> {ok | error, req()}. 
+chunk(Chunk, {cowboy, Req} = R) -> + case cowboy_req:chunk(Chunk, Req) of + ok -> {ok, R}; + {error, _E} -> {error, R} + %% This shouldn't happen too often, usually we + %% should catch tco socket closure before. + end. + +-spec chunk_end(req()) -> req(). +chunk_end({cowboy, _Req} = R) -> R. + +enbinary(L) -> [{list_to_binary(K), list_to_binary(V)} || {K, V} <- L]. + + +-spec hook_tcp_close(req()) -> req(). +hook_tcp_close(R = {cowboy, Req}) -> + [T, S] = cowboy_req:get([transport, socket], Req), + T:setopts(S,[{active,once}]), + R. + +-spec unhook_tcp_close(req()) -> req(). +unhook_tcp_close(R = {cowboy, Req}) -> + [T, S] = cowboy_req:get([transport, socket], Req), + T:setopts(S,[{active,false}]), + R. + +-spec abruptly_kill(req()) -> req(). +abruptly_kill(R = {cowboy, Req}) -> + [T, S] = cowboy_req:get([transport, socket], Req), + ok = T:close(S), + R. diff --git a/rabbitmq-server/deps/sockjs/src/sockjs_internal.hrl b/rabbitmq-server/deps/sockjs/src/sockjs_internal.hrl new file mode 100644 index 0000000..916c1bd --- /dev/null +++ b/rabbitmq-server/deps/sockjs/src/sockjs_internal.hrl @@ -0,0 +1,33 @@ + +-type(req() :: {cowboy, any()}). + +-type(user_session() :: nonempty_string()). +-type(emittable() :: init|closed|{recv, binary()}). +-type(callback() :: fun((user_session(), emittable(), any()) -> ok)). +-type(logger() :: fun((any(), req(), websocket|http) -> req())). + +-record(service, {prefix :: nonempty_string(), + callback :: callback(), + state :: any(), + sockjs_url :: nonempty_string(), + cookie_needed :: boolean(), + websocket :: boolean(), + disconnect_delay :: non_neg_integer(), + heartbeat_delay :: non_neg_integer(), + response_limit :: non_neg_integer(), + logger :: logger(), + subproto_pref :: [binary()] + }). + +-type(service() :: #service{}). + +-type(headers() :: list({nonempty_string(), nonempty_string()})). +-type(server() :: nonempty_string()). +-type(session() :: nonempty_string()). 
+ +-type(frame() :: {open, nil} | + {close, {non_neg_integer(), string()}} | + {data, list(iodata())} | + {heartbeat, nil} ). + +-type(info() :: [{atom(), any()}]). diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_json.erl b/rabbitmq-server/deps/sockjs/src/sockjs_json.erl similarity index 76% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_json.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_json.erl index d3dae20..e61f4b9 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_json.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_json.erl @@ -4,11 +4,11 @@ %% -------------------------------------------------------------------------- -%% -spec encode(any()) -> iodata(). +-spec encode(any()) -> iodata(). encode(Thing) -> mochijson2_fork:encode(Thing). -%% -spec decode(iodata()) -> {ok, any()} | {error, any()}. +-spec decode(iodata()) -> {ok, any()} | {error, any()}. decode(Encoded) -> try mochijson2_fork:decode(Encoded) of V -> {ok, V} diff --git a/rabbitmq-server/deps/sockjs/src/sockjs_multiplex.erl b/rabbitmq-server/deps/sockjs/src/sockjs_multiplex.erl new file mode 100644 index 0000000..3922e8c --- /dev/null +++ b/rabbitmq-server/deps/sockjs/src/sockjs_multiplex.erl @@ -0,0 +1,143 @@ +-module(sockjs_multiplex). + +-behaviour(sockjs_service). + +-export([init_state/1, init_state/2]). +-export([sockjs_init/2, sockjs_handle/3, sockjs_terminate/2]). + +-record(service, {callback, state, vconn}). +-record(authen_callback, {callback, success = false}). 
+ +%% -------------------------------------------------------------------------- + +init_state(Services, {AuthenCallback, Options}) -> + L = [{Topic, #service{callback = Callback, state = State}} || + {Topic, Callback, State} <- Services], + + Extra = case lists:keyfind(state, 1, Options) of + {state, ExtraValue} -> + case erlang:is_list(ExtraValue) of + true -> + ExtraValue; + false -> + [] + end; + false -> + [] + end, + + % Services, Channels, AuthenCallback, Extra + {orddict:from_list(L), orddict:new(), + #authen_callback{callback = AuthenCallback, success = false}, + Extra}. + +init_state(Services) -> + init_state(Services, {undefined, []}). + + +%% Get result of authentication callback if it exists. +%% Otherwise return ``authen_callback_not_found``. +%% Authentication callback should return {ok, State} or {success, State}. +get_authen_callback_result(#authen_callback{callback = AuthenCallback}, + Handle, What, Extra) -> + case erlang:is_function(AuthenCallback) of + true -> + AuthenCallback(Handle, What, Extra); + false -> + authen_callback_not_found + end. + +sockjs_init(Conn, {Services, Channels, AuthenCallbackRec, Extra} = S) -> + case get_authen_callback_result(AuthenCallbackRec, Conn, init, Extra) of + authen_callback_not_found -> + {ok, S}; + {ok, Extra1} -> + {ok, {Services, Channels, AuthenCallbackRec, Extra1}} + end. + +sockjs_handle_via_channel(Conn, Data, {Services, Channels, AuthenCallbackRec, Extra}) -> + [Type, Topic, Payload] = split($,, binary_to_list(Data), 3), + case orddict:find(Topic, Services) of + {ok, Service} -> + Channels1 = action(Conn, {Type, Topic, Payload}, Service, Channels, Extra), + {ok, {Services, Channels1, AuthenCallbackRec, Extra}}; + _Else -> + {ok, {Services, Channels, AuthenCallbackRec, Extra}} + end. 
+ +sockjs_handle(Conn, Data, {Services, Channels, + #authen_callback{success = Success} = AuthenCallbackRec, + Extra} = S) -> + case Success of + true -> + sockjs_handle_via_channel(Conn, Data, S); + false -> + case get_authen_callback_result(AuthenCallbackRec, Conn, {recv, Data}, Extra) of + authen_callback_not_found -> + sockjs_handle_via_channel(Conn, Data, {Services, Channels, AuthenCallbackRec, Extra}); + {success, Extra1} -> + {ok, {Services, Channels, AuthenCallbackRec#authen_callback{success = true}, Extra1}}; + {ok, Extra1} -> + {ok, {Services, Channels, AuthenCallbackRec, Extra1}} + end + end. + +sockjs_terminate(Conn, {Services, Channels, AuthenCallbackRec, Extra}) -> + case get_authen_callback_result(AuthenCallbackRec, Conn, closed, Extra) of + {ok, Extra1} -> + ok; + _Else -> + Extra1 = Extra + end, + + _ = [ {emit(closed, Channel)} || + {_Topic, Channel} <- orddict:to_list(Channels) ], + {ok, {Services, orddict:new(), AuthenCallbackRec, Extra1}}. + + +action(Conn, {Type, Topic, Payload}, Service, Channels, Extra) -> + case {Type, orddict:is_key(Topic, Channels)} of + {"sub", false} -> + Channel = Service#service{ + state = Service#service.state ++ Extra, + vconn = {sockjs_multiplex_channel, Conn, Topic} + }, + orddict:store(Topic, emit(init, Channel), Channels); + {"uns", true} -> + Channel = orddict:fetch(Topic, Channels), + emit(closed, Channel), + orddict:erase(Topic, Channels); + {"msg", true} -> + Channel = orddict:fetch(Topic, Channels), + orddict:store(Topic, emit({recv, Payload}, Channel), Channels); + _Else -> + %% Ignore + Channels + end. + + +emit(What, Channel = #service{callback = Callback, + state = State, + vconn = VConn}) -> + case Callback(VConn, What, State) of + {ok, State1} -> Channel#service{state = State1}; + ok -> Channel + end. 
+ + +%% -------------------------------------------------------------------------- + +split(Char, Str, Limit) when Limit > 0 -> + Acc = split(Char, Str, Limit, []), + lists:reverse(Acc); +split(_Char, Str, 0) -> + [Str]. + +split(_Char, Str, 1, Acc) -> + [Str | Acc]; +split(Char, Str, Limit, Acc) -> + {L, R} = case string:chr(Str, Char) of + 0 -> {Str, ""}; + I -> {string:substr(Str, 1, I-1), string:substr(Str, I+1)} + end, + split(Char, R, Limit-1, [L | Acc]). diff --git a/rabbitmq-server/deps/sockjs/src/sockjs_multiplex_channel.erl b/rabbitmq-server/deps/sockjs/src/sockjs_multiplex_channel.erl new file mode 100644 index 0000000..4cc48fa --- /dev/null +++ b/rabbitmq-server/deps/sockjs/src/sockjs_multiplex_channel.erl @@ -0,0 +1,25 @@ +-module(sockjs_multiplex_channel). + +-export([send/2, close/1, close/3, info/1]). + +-type(channel() :: {?MODULE, sockjs_session:conn(), topic()}). +-type(topic() :: string()). + +-export_type([channel/0, topic/0]). + + +-spec send(iodata(), channel()) -> ok. +send(Data, {?MODULE, Conn = {sockjs_session, _}, Topic}) -> + sockjs_session:send(iolist_to_binary(["msg", ",", Topic, ",", Data]), Conn). + +-spec close(channel()) -> ok. +close(Channel) -> + close(1000, "Normal closure", Channel). + +-spec close(non_neg_integer(), string(), channel()) -> ok. +close(_Code, _Reason, {?MODULE, Conn, Topic}) -> + sockjs_session:send(iolist_to_binary(["uns", ",", Topic]), Conn). + +-spec info(channel()) -> [{atom(), any()}]. +info({?MODULE, Conn = {sockjs_session, _}, Topic}) -> + sockjs_session:info(Conn) ++ [{topic, Topic}]. 
\ No newline at end of file diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_service.erl b/rabbitmq-server/deps/sockjs/src/sockjs_service.erl similarity index 100% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_service.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_service.erl diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session.erl b/rabbitmq-server/deps/sockjs/src/sockjs_session.erl similarity index 86% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_session.erl index 7e4ae00..d2d5d8b 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_session.erl @@ -11,39 +11,49 @@ handle_cast/2]). -include("sockjs_internal.hrl"). -%% -type(handle() :: {?MODULE, {pid(), info()}}). - --record(session, {id , %% session(), - outbound_queue = queue:new() , %% queue() - response_pid , %% pid() - disconnect_tref , %% reference() - disconnect_delay = 5000 , %% non_neg_integer() - heartbeat_tref , %% reference() | triggered - heartbeat_delay = 25000 , %% non_neg_integer() - ready_state = connecting , %% connecting | open | closed - close_msg , %% {non_neg_integer(), string()} + +-type(handle() :: {?MODULE, {pid(), info()}}). +-type(conn() :: {?MODULE, any()}). + +-export_type([conn/0]). + +-ifdef(pre17_type_specs). +-define(QUEUE_TYPE, queue()). +-else. +-define(QUEUE_TYPE, queue:queue()). +-endif. 
+ +-record(session, {id :: session(), + outbound_queue = queue:new() :: ?QUEUE_TYPE, + response_pid :: pid(), + disconnect_tref :: reference(), + disconnect_delay = 5000 :: non_neg_integer(), + heartbeat_tref :: reference() | triggered, + heartbeat_delay = 25000 :: non_neg_integer(), + ready_state = connecting :: connecting | open | closed, + close_msg :: {non_neg_integer(), string()}, callback, state, - handle %% handle() + handle :: handle() }). -define(ETS, sockjs_table). -%% -type(session_or_undefined() :: session() | undefined). -%% -type(session_or_pid() :: session() | pid()). +-type(session_or_undefined() :: session() | undefined). +-type(session_or_pid() :: session() | pid()). %% -------------------------------------------------------------------------- -%% -spec init() -> ok. +-spec init() -> ok. init() -> _ = ets:new(?ETS, [public, named_table]), ok. -%% -spec start_link(session_or_undefined(), service(), info()) -> {ok, pid()}. +-spec start_link(session_or_undefined(), service(), info()) -> {ok, pid()}. start_link(SessionId, Service, Info) -> gen_server:start_link(?MODULE, {SessionId, Service, Info}, []). -%% -spec maybe_create(session_or_undefined(), service(), info()) -> pid(). +-spec maybe_create(session_or_undefined(), service(), info()) -> pid(). maybe_create(SessionId, Service, Info) -> case ets:lookup(?ETS, SessionId) of [] -> {ok, SPid} = sockjs_session_sup:start_child( @@ -53,7 +63,7 @@ maybe_create(SessionId, Service, Info) -> end. -%% -spec received(list(iodata()), session_or_pid()) -> ok. +-spec received(list(iodata()), session_or_pid()) -> ok. received(Messages, SessionPid) when is_pid(SessionPid) -> case gen_server:call(SessionPid, {received, Messages}, infinity) of ok -> ok; @@ -63,27 +73,27 @@ received(Messages, SessionPid) when is_pid(SessionPid) -> received(Messages, SessionId) -> received(Messages, spid(SessionId)). -%% -spec send(iodata(), handle()) -> ok. +-spec send(iodata(), handle()) -> ok. 
send(Data, {?MODULE, {SPid, _}}) -> gen_server:cast(SPid, {send, Data}), ok. -%% -spec close(non_neg_integer(), string(), handle()) -> ok. +-spec close(non_neg_integer(), string(), handle()) -> ok. close(Code, Reason, {?MODULE, {SPid, _}}) -> gen_server:cast(SPid, {close, Code, Reason}), ok. -%% -spec info(handle()) -> info(). +-spec info(handle()) -> info(). info({?MODULE, {_SPid, Info}}) -> Info. -%% -spec reply(session_or_pid()) -> -%% wait | session_in_use | {ok | close, frame()}. +-spec reply(session_or_pid()) -> + wait | session_in_use | {ok | close, frame()}. reply(Session) -> reply(Session, true). -%% -spec reply(session_or_pid(), boolean()) -> -%% wait | session_in_use | {ok | close, frame()}. +-spec reply(session_or_pid(), boolean()) -> + wait | session_in_use | {ok | close, frame()}. reply(SessionPid, Multiple) when is_pid(SessionPid) -> gen_server:call(SessionPid, {reply, self(), Multiple}, infinity); reply(SessionId, Multiple) -> @@ -154,7 +164,7 @@ unmark_waiting(RPid, State = #session{response_pid = Pid, when Pid =/= undefined andalso Pid =/= RPid -> State. -%% -spec emit(emittable(), #session{}) -> #session{}. +-spec emit(emittable(), #session{}) -> #session{}. emit(What, State = #session{callback = Callback, state = UserState, handle = Handle}) -> @@ -175,7 +185,7 @@ emit(What, State = #session{callback = Callback, %% -------------------------------------------------------------------------- -%% -spec init({session_or_undefined(), service(), info()}) -> {ok, #session{}}. +-spec init({session_or_undefined(), service(), info()}) -> {ok, #session{}}. 
init({SessionId, #service{callback = Callback, state = UserState, disconnect_delay = DisconnectDelay, diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session_sup.erl b/rabbitmq-server/deps/sockjs/src/sockjs_session_sup.erl similarity index 88% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session_sup.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_session_sup.erl index 71c7ff4..4197ce3 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_session_sup.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_session_sup.erl @@ -7,7 +7,7 @@ %% -------------------------------------------------------------------------- -%% -spec start_link() -> ignore | {'ok', pid()} | {'error', any()}. +-spec start_link() -> ignore | {'ok', pid()} | {'error', any()}. start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_util.erl b/rabbitmq-server/deps/sockjs/src/sockjs_util.erl similarity index 88% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_util.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_util.erl index 9b9969d..be3f972 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_util.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_util.erl @@ -8,7 +8,7 @@ %% -------------------------------------------------------------------------- -%% -spec rand32() -> non_neg_integer(). +-spec rand32() -> non_neg_integer(). rand32() -> case get(random_seeded) of undefined -> @@ -21,7 +21,7 @@ rand32() -> random:uniform(erlang:trunc(math:pow(2,32)))-1. -%% -spec encode_frame(frame()) -> iodata(). +-spec encode_frame(frame()) -> iodata(). encode_frame({open, nil}) -> <<"o">>; encode_frame({close, {Code, Reason}}) -> @@ -34,7 +34,7 @@ encode_frame({heartbeat, nil}) -> <<"h">>. 
-%% -spec url_escape(string(), string()) -> iolist(). +-spec url_escape(string(), string()) -> iolist(). url_escape(Str, Chars) -> [case lists:member(Char, Chars) of true -> hex(Char); diff --git a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_ws_handler.erl b/rabbitmq-server/deps/sockjs/src/sockjs_ws_handler.erl similarity index 88% rename from rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_ws_handler.erl rename to rabbitmq-server/deps/sockjs/src/sockjs_ws_handler.erl index c011c89..bcf463d 100644 --- a/rabbitmq-server/plugins-src/sockjs-erlang-wrapper/sockjs-erlang-git/src/sockjs_ws_handler.erl +++ b/rabbitmq-server/deps/sockjs/src/sockjs_ws_handler.erl @@ -6,7 +6,7 @@ %% -------------------------------------------------------------------------- -%% -spec received(websocket|rawwebsocket, pid(), binary()) -> ok | shutdown. +-spec received(websocket|rawwebsocket, pid(), binary()) -> ok | shutdown. %% Ignore empty received(_RawWebsocket, _SessionPid, <<>>) -> ok; @@ -30,7 +30,7 @@ session_received(Messages, SessionPid) -> no_session -> shutdown end. -%% -spec reply(websocket|rawwebsocket, pid()) -> {close|open, binary()} | wait. +-spec reply(websocket|rawwebsocket, pid()) -> {close|open, binary()} | wait. reply(websocket, SessionPid) -> case sockjs_session:reply(SessionPid) of {W, Frame} when W =:= ok orelse W =:= close-> @@ -52,7 +52,7 @@ reply(rawwebsocket, SessionPid) -> wait end. -%% -spec close(websocket|rawwebsocket, pid()) -> ok. +-spec close(websocket|rawwebsocket, pid()) -> ok. close(_RawWebsocket, SessionPid) -> SessionPid ! force_shutdown, ok. 
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/Emakefile b/rabbitmq-server/deps/webmachine/Emakefile similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/Emakefile rename to rabbitmq-server/deps/webmachine/Emakefile diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/LICENSE b/rabbitmq-server/deps/webmachine/LICENSE similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/LICENSE rename to rabbitmq-server/deps/webmachine/LICENSE diff --git a/rabbitmq-server/deps/webmachine/Makefile b/rabbitmq-server/deps/webmachine/Makefile new file mode 100644 index 0000000..a3ed568 --- /dev/null +++ b/rabbitmq-server/deps/webmachine/Makefile @@ -0,0 +1,25 @@ +IGNORE_DEPS += edown eper eunit_formatters meck node_package rebar_lock_deps_plugin rebar_vsn_plugin reltool_util +C_SRC_DIR = /path/do/not/exist +C_SRC_TYPE = rebar +DRV_CFLAGS = -fPIC +export DRV_CFLAGS +ERLANG_ARCH = 64 +export ERLANG_ARCH +ERLC_OPTS = +debug_info +export ERLC_OPTS +ERLC_OPTS += -Dold_hash=1 + +DEPS += mochiweb +dep_mochiweb = git git://github.com/rabbitmq/mochiweb 845428379ed8b58eadc49aba26838d86ea809663 +COMPILE_FIRST += + + +rebar_dep: preprocess pre-deps deps pre-app app + +preprocess:: + +pre-deps:: + +pre-app:: + +include ../../erlang.mk \ No newline at end of file diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/Makefile b/rabbitmq-server/deps/webmachine/Makefile.orig.mk similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/Makefile rename to rabbitmq-server/deps/webmachine/Makefile.orig.mk diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/README.org b/rabbitmq-server/deps/webmachine/README.org similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/README.org rename to rabbitmq-server/deps/webmachine/README.org diff --git 
a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/THANKS b/rabbitmq-server/deps/webmachine/THANKS similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/THANKS rename to rabbitmq-server/deps/webmachine/THANKS diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/Makefile b/rabbitmq-server/deps/webmachine/demo/Makefile similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/Makefile rename to rabbitmq-server/deps/webmachine/demo/Makefile diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/README b/rabbitmq-server/deps/webmachine/demo/README similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/README rename to rabbitmq-server/deps/webmachine/demo/README diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/priv/dispatch.conf b/rabbitmq-server/deps/webmachine/demo/priv/dispatch.conf similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/priv/dispatch.conf rename to rabbitmq-server/deps/webmachine/demo/priv/dispatch.conf diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/rebar.config b/rabbitmq-server/deps/webmachine/demo/rebar.config similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/rebar.config rename to rabbitmq-server/deps/webmachine/demo/rebar.config diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo.app.src b/rabbitmq-server/deps/webmachine/demo/src/webmachine_demo.app.src similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo.app.src rename to rabbitmq-server/deps/webmachine/demo/src/webmachine_demo.app.src diff --git 
a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo.erl b/rabbitmq-server/deps/webmachine/demo/src/webmachine_demo.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo.erl rename to rabbitmq-server/deps/webmachine/demo/src/webmachine_demo.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_app.erl b/rabbitmq-server/deps/webmachine/demo/src/webmachine_demo_app.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_app.erl rename to rabbitmq-server/deps/webmachine/demo/src/webmachine_demo_app.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_fs_resource.erl b/rabbitmq-server/deps/webmachine/demo/src/webmachine_demo_fs_resource.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_fs_resource.erl rename to rabbitmq-server/deps/webmachine/demo/src/webmachine_demo_fs_resource.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_resource.erl b/rabbitmq-server/deps/webmachine/demo/src/webmachine_demo_resource.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_resource.erl rename to rabbitmq-server/deps/webmachine/demo/src/webmachine_demo_resource.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_sup.erl b/rabbitmq-server/deps/webmachine/demo/src/webmachine_demo_sup.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/src/webmachine_demo_sup.erl rename to rabbitmq-server/deps/webmachine/demo/src/webmachine_demo_sup.erl diff --git 
a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/start.sh b/rabbitmq-server/deps/webmachine/demo/start.sh similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/demo/start.sh rename to rabbitmq-server/deps/webmachine/demo/start.sh diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/docs/http-headers-status-v3.png b/rabbitmq-server/deps/webmachine/docs/http-headers-status-v3.png similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/docs/http-headers-status-v3.png rename to rabbitmq-server/deps/webmachine/docs/http-headers-status-v3.png diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/webmachine.hrl b/rabbitmq-server/deps/webmachine/include/webmachine.hrl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/webmachine.hrl rename to rabbitmq-server/deps/webmachine/include/webmachine.hrl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/webmachine_logger.hrl b/rabbitmq-server/deps/webmachine/include/webmachine_logger.hrl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/webmachine_logger.hrl rename to rabbitmq-server/deps/webmachine/include/webmachine_logger.hrl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_reqdata.hrl b/rabbitmq-server/deps/webmachine/include/wm_reqdata.hrl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_reqdata.hrl rename to rabbitmq-server/deps/webmachine/include/wm_reqdata.hrl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_reqstate.hrl b/rabbitmq-server/deps/webmachine/include/wm_reqstate.hrl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_reqstate.hrl rename to 
rabbitmq-server/deps/webmachine/include/wm_reqstate.hrl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_resource.hrl b/rabbitmq-server/deps/webmachine/include/wm_resource.hrl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/include/wm_resource.hrl rename to rabbitmq-server/deps/webmachine/include/wm_resource.hrl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/Makefile b/rabbitmq-server/deps/webmachine/priv/templates/Makefile similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/Makefile rename to rabbitmq-server/deps/webmachine/priv/templates/Makefile diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/README b/rabbitmq-server/deps/webmachine/priv/templates/README similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/README rename to rabbitmq-server/deps/webmachine/priv/templates/README diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/priv/dispatch.conf b/rabbitmq-server/deps/webmachine/priv/templates/priv/dispatch.conf similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/priv/dispatch.conf rename to rabbitmq-server/deps/webmachine/priv/templates/priv/dispatch.conf diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/rebar.config b/rabbitmq-server/deps/webmachine/priv/templates/rebar.config similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/rebar.config rename to rabbitmq-server/deps/webmachine/priv/templates/rebar.config diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel.app.src b/rabbitmq-server/deps/webmachine/priv/templates/src/wmskel.app.src 
similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel.app.src rename to rabbitmq-server/deps/webmachine/priv/templates/src/wmskel.app.src diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel.erl b/rabbitmq-server/deps/webmachine/priv/templates/src/wmskel.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel.erl rename to rabbitmq-server/deps/webmachine/priv/templates/src/wmskel.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_app.erl b/rabbitmq-server/deps/webmachine/priv/templates/src/wmskel_app.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_app.erl rename to rabbitmq-server/deps/webmachine/priv/templates/src/wmskel_app.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_resource.erl b/rabbitmq-server/deps/webmachine/priv/templates/src/wmskel_resource.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_resource.erl rename to rabbitmq-server/deps/webmachine/priv/templates/src/wmskel_resource.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_sup.erl b/rabbitmq-server/deps/webmachine/priv/templates/src/wmskel_sup.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/src/wmskel_sup.erl rename to rabbitmq-server/deps/webmachine/priv/templates/src/wmskel_sup.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/start.sh b/rabbitmq-server/deps/webmachine/priv/templates/start.sh similarity index 100% rename from 
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/start.sh rename to rabbitmq-server/deps/webmachine/priv/templates/start.sh diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/wmskel.template b/rabbitmq-server/deps/webmachine/priv/templates/wmskel.template similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/templates/wmskel.template rename to rabbitmq-server/deps/webmachine/priv/templates/wmskel.template diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/http-headers-status-v3.png b/rabbitmq-server/deps/webmachine/priv/trace/http-headers-status-v3.png similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/http-headers-status-v3.png rename to rabbitmq-server/deps/webmachine/priv/trace/http-headers-status-v3.png diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/wmtrace.css b/rabbitmq-server/deps/webmachine/priv/trace/wmtrace.css similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/wmtrace.css rename to rabbitmq-server/deps/webmachine/priv/trace/wmtrace.css diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/wmtrace.js b/rabbitmq-server/deps/webmachine/priv/trace/wmtrace.js similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/trace/wmtrace.js rename to rabbitmq-server/deps/webmachine/priv/trace/wmtrace.js diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/www/index.html b/rabbitmq-server/deps/webmachine/priv/www/index.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/priv/www/index.html rename to rabbitmq-server/deps/webmachine/priv/www/index.html diff --git 
a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar b/rabbitmq-server/deps/webmachine/rebar similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar rename to rabbitmq-server/deps/webmachine/rebar diff --git a/rabbitmq-server/deps/webmachine/rebar.config b/rabbitmq-server/deps/webmachine/rebar.config new file mode 100644 index 0000000..b7c5143 --- /dev/null +++ b/rabbitmq-server/deps/webmachine/rebar.config @@ -0,0 +1,9 @@ +%%-*- mode: erlang -*- +{erl_opts, [warnings_as_errors]}. +{cover_enabled, true}. +{edoc_opts, [{preprocess, true}]}. + +{deps, [ + {mochiweb, ".*", {git, "git://github.com/rabbitmq/mochiweb", + "845428379ed8b58eadc49aba26838d86ea809663"}} + ]}. diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar.config.script b/rabbitmq-server/deps/webmachine/rebar.config.script similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/rebar.config.script rename to rabbitmq-server/deps/webmachine/rebar.config.script diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/scripts/new_webmachine.sh b/rabbitmq-server/deps/webmachine/scripts/new_webmachine.sh similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/scripts/new_webmachine.sh rename to rabbitmq-server/deps/webmachine/scripts/new_webmachine.sh diff --git a/rabbitmq-server/deps/webmachine/src/webmachine.app.src b/rabbitmq-server/deps/webmachine/src/webmachine.app.src new file mode 100644 index 0000000..51454fa --- /dev/null +++ b/rabbitmq-server/deps/webmachine/src/webmachine.app.src @@ -0,0 +1,8 @@ +{application,webmachine, + [{description,"webmachine"}, + {vsn,"1.10.3"}, + {modules,[]}, + {registered,[]}, + {applications,[kernel,stdlib,mochiweb]}, + {mod,{webmachine_app,[]}}, + {env,[]}]}. 
diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine.erl b/rabbitmq-server/deps/webmachine/src/webmachine.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine.erl rename to rabbitmq-server/deps/webmachine/src/webmachine.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_app.erl b/rabbitmq-server/deps/webmachine/src/webmachine_app.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_app.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_app.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_decision_core.erl b/rabbitmq-server/deps/webmachine/src/webmachine_decision_core.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_decision_core.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_decision_core.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_deps.erl b/rabbitmq-server/deps/webmachine/src/webmachine_deps.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_deps.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_deps.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_dispatcher.erl b/rabbitmq-server/deps/webmachine/src/webmachine_dispatcher.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_dispatcher.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_dispatcher.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_error.erl b/rabbitmq-server/deps/webmachine/src/webmachine_error.erl similarity index 100% rename from 
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_error.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_error.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_error_handler.erl b/rabbitmq-server/deps/webmachine/src/webmachine_error_handler.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_error_handler.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_error_handler.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_log.erl b/rabbitmq-server/deps/webmachine/src/webmachine_log.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_log.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_log.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_log_handler.erl b/rabbitmq-server/deps/webmachine/src/webmachine_log_handler.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_log_handler.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_log_handler.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_logger_watcher.erl b/rabbitmq-server/deps/webmachine/src/webmachine_logger_watcher.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_logger_watcher.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_logger_watcher.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_logger_watcher_sup.erl b/rabbitmq-server/deps/webmachine/src/webmachine_logger_watcher_sup.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_logger_watcher_sup.erl rename to 
rabbitmq-server/deps/webmachine/src/webmachine_logger_watcher_sup.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_mochiweb.erl b/rabbitmq-server/deps/webmachine/src/webmachine_mochiweb.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_mochiweb.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_mochiweb.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_multipart.erl b/rabbitmq-server/deps/webmachine/src/webmachine_multipart.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_multipart.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_multipart.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_perf_log_handler.erl b/rabbitmq-server/deps/webmachine/src/webmachine_perf_log_handler.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_perf_log_handler.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_perf_log_handler.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_request.erl b/rabbitmq-server/deps/webmachine/src/webmachine_request.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_request.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_request.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_resource.erl b/rabbitmq-server/deps/webmachine/src/webmachine_resource.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_resource.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_resource.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_router.erl 
b/rabbitmq-server/deps/webmachine/src/webmachine_router.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_router.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_router.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_sup.erl b/rabbitmq-server/deps/webmachine/src/webmachine_sup.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_sup.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_sup.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_util.erl b/rabbitmq-server/deps/webmachine/src/webmachine_util.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/webmachine_util.erl rename to rabbitmq-server/deps/webmachine/src/webmachine_util.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/wmtrace_resource.erl b/rabbitmq-server/deps/webmachine/src/wmtrace_resource.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/wmtrace_resource.erl rename to rabbitmq-server/deps/webmachine/src/wmtrace_resource.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/wrq.erl b/rabbitmq-server/deps/webmachine/src/wrq.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/src/wrq.erl rename to rabbitmq-server/deps/webmachine/src/wrq.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/start-dev.sh b/rabbitmq-server/deps/webmachine/start-dev.sh similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/start-dev.sh rename to rabbitmq-server/deps/webmachine/start-dev.sh diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/start.sh b/rabbitmq-server/deps/webmachine/start.sh 
similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/start.sh rename to rabbitmq-server/deps/webmachine/start.sh diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/test/etag_test.erl b/rabbitmq-server/deps/webmachine/test/etag_test.erl similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/test/etag_test.erl rename to rabbitmq-server/deps/webmachine/test/etag_test.erl diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/blogs.html b/rabbitmq-server/deps/webmachine/www/blogs.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/blogs.html rename to rabbitmq-server/deps/webmachine/www/blogs.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/contact.html b/rabbitmq-server/deps/webmachine/www/contact.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/contact.html rename to rabbitmq-server/deps/webmachine/www/contact.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/css/style-1c.css b/rabbitmq-server/deps/webmachine/www/css/style-1c.css similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/css/style-1c.css rename to rabbitmq-server/deps/webmachine/www/css/style-1c.css diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/css/style.css b/rabbitmq-server/deps/webmachine/www/css/style.css similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/css/style.css rename to rabbitmq-server/deps/webmachine/www/css/style.css diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/debugging.html b/rabbitmq-server/deps/webmachine/www/debugging.html similarity index 100% rename from 
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/debugging.html rename to rabbitmq-server/deps/webmachine/www/debugging.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/diagram.html b/rabbitmq-server/deps/webmachine/www/diagram.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/diagram.html rename to rabbitmq-server/deps/webmachine/www/diagram.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/dispatcher.html b/rabbitmq-server/deps/webmachine/www/dispatcher.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/dispatcher.html rename to rabbitmq-server/deps/webmachine/www/dispatcher.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/docs.html b/rabbitmq-server/deps/webmachine/www/docs.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/docs.html rename to rabbitmq-server/deps/webmachine/www/docs.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/example_resources.html b/rabbitmq-server/deps/webmachine/www/example_resources.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/example_resources.html rename to rabbitmq-server/deps/webmachine/www/example_resources.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/favicon.ico b/rabbitmq-server/deps/webmachine/www/favicon.ico similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/favicon.ico rename to rabbitmq-server/deps/webmachine/www/favicon.ico diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/WM200-crop.png b/rabbitmq-server/deps/webmachine/www/images/WM200-crop.png similarity index 100% rename from 
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/WM200-crop.png rename to rabbitmq-server/deps/webmachine/www/images/WM200-crop.png diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basho-landscape.gif b/rabbitmq-server/deps/webmachine/www/images/basho-landscape.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basho-landscape.gif rename to rabbitmq-server/deps/webmachine/www/images/basho-landscape.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-decision-tab.png b/rabbitmq-server/deps/webmachine/www/images/basic-trace-decision-tab.png similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-decision-tab.png rename to rabbitmq-server/deps/webmachine/www/images/basic-trace-decision-tab.png diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-labeled.png b/rabbitmq-server/deps/webmachine/www/images/basic-trace-labeled.png similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-labeled.png rename to rabbitmq-server/deps/webmachine/www/images/basic-trace-labeled.png diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-request-tab.png b/rabbitmq-server/deps/webmachine/www/images/basic-trace-request-tab.png similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-request-tab.png rename to rabbitmq-server/deps/webmachine/www/images/basic-trace-request-tab.png diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-response-tab.png b/rabbitmq-server/deps/webmachine/www/images/basic-trace-response-tab.png similarity index 100% rename from 
rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/basic-trace-response-tab.png rename to rabbitmq-server/deps/webmachine/www/images/basic-trace-response-tab.png diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/bg.gif b/rabbitmq-server/deps/webmachine/www/images/bg.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/bg.gif rename to rabbitmq-server/deps/webmachine/www/images/bg.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/blankbox.gif b/rabbitmq-server/deps/webmachine/www/images/blankbox.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/blankbox.gif rename to rabbitmq-server/deps/webmachine/www/images/blankbox.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/chash.gif b/rabbitmq-server/deps/webmachine/www/images/chash.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/chash.gif rename to rabbitmq-server/deps/webmachine/www/images/chash.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/easy-ops.gif b/rabbitmq-server/deps/webmachine/www/images/easy-ops.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/easy-ops.gif rename to rabbitmq-server/deps/webmachine/www/images/easy-ops.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/gossip4.gif b/rabbitmq-server/deps/webmachine/www/images/gossip4.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/gossip4.gif rename to rabbitmq-server/deps/webmachine/www/images/gossip4.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/halfblankbox.gif 
b/rabbitmq-server/deps/webmachine/www/images/halfblankbox.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/halfblankbox.gif rename to rabbitmq-server/deps/webmachine/www/images/halfblankbox.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/http-headers-status-v3.png b/rabbitmq-server/deps/webmachine/www/images/http-headers-status-v3.png similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/http-headers-status-v3.png rename to rabbitmq-server/deps/webmachine/www/images/http-headers-status-v3.png diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/more.gif b/rabbitmq-server/deps/webmachine/www/images/more.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/more.gif rename to rabbitmq-server/deps/webmachine/www/images/more.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/site.gif b/rabbitmq-server/deps/webmachine/www/images/site.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/site.gif rename to rabbitmq-server/deps/webmachine/www/images/site.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/splash250.gif b/rabbitmq-server/deps/webmachine/www/images/splash250.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/splash250.gif rename to rabbitmq-server/deps/webmachine/www/images/splash250.gif diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/vclock.gif b/rabbitmq-server/deps/webmachine/www/images/vclock.gif similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/images/vclock.gif rename to rabbitmq-server/deps/webmachine/www/images/vclock.gif diff 
--git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/index.html b/rabbitmq-server/deps/webmachine/www/index.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/index.html rename to rabbitmq-server/deps/webmachine/www/index.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/intros.html b/rabbitmq-server/deps/webmachine/www/intros.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/intros.html rename to rabbitmq-server/deps/webmachine/www/intros.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/mechanics.html b/rabbitmq-server/deps/webmachine/www/mechanics.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/mechanics.html rename to rabbitmq-server/deps/webmachine/www/mechanics.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/quickstart.html b/rabbitmq-server/deps/webmachine/www/quickstart.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/quickstart.html rename to rabbitmq-server/deps/webmachine/www/quickstart.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/reftrans.html b/rabbitmq-server/deps/webmachine/www/reftrans.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/reftrans.html rename to rabbitmq-server/deps/webmachine/www/reftrans.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/reqdata.html b/rabbitmq-server/deps/webmachine/www/reqdata.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/reqdata.html rename to rabbitmq-server/deps/webmachine/www/reqdata.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/resources.html 
b/rabbitmq-server/deps/webmachine/www/resources.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/resources.html rename to rabbitmq-server/deps/webmachine/www/resources.html diff --git a/rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/streambody.html b/rabbitmq-server/deps/webmachine/www/streambody.html similarity index 100% rename from rabbitmq-server/plugins-src/webmachine-wrapper/webmachine-git/www/streambody.html rename to rabbitmq-server/deps/webmachine/www/streambody.html diff --git a/rabbitmq-server/docs/README-for-packages b/rabbitmq-server/docs/README-for-packages new file mode 100644 index 0000000..35a1523 --- /dev/null +++ b/rabbitmq-server/docs/README-for-packages @@ -0,0 +1,20 @@ +This is rabbitmq-server, a message broker implementing AMQP, STOMP and MQTT. + +Most of the documentation for RabbitMQ is provided on the RabbitMQ web +site. You can see documentation for the current version at: + +http://www.rabbitmq.com/documentation.html + +and for previous versions at: + +http://www.rabbitmq.com/previous.html + +Man pages are installed with this package. Of particular interest are +rabbitmqctl(1), to interact with a running RabbitMQ server, and +rabbitmq-plugins(1), to enable and disable plugins. These should be +run as the superuser. + +An example configuration file is provided in the same directory as +this README. Copy it to /etc/rabbitmq/rabbitmq.config to use it. The +RabbitMQ server must be restarted after changing the configuration +file. diff --git a/rabbitmq-server/docs/rabbitmq.config.example b/rabbitmq-server/docs/rabbitmq.config.example index 9b3855c..f7477ab 100644 --- a/rabbitmq-server/docs/rabbitmq.config.example +++ b/rabbitmq-server/docs/rabbitmq.config.example @@ -27,6 +27,12 @@ %% %% {ssl_listeners, [5671]}, + %% Number of Erlang processes that will accept connections for the TCP + %% and SSL listeners. 
+ %% + %% {num_tcp_acceptors, 10}, + %% {num_ssl_acceptors, 1}, + %% Maximum time for AMQP 0-8/0-9/0-9-1 handshake (after socket connection %% and SSL handshake), in milliseconds. %% @@ -120,6 +126,12 @@ %% %% {ssl_handshake_timeout, 5000}, + %% Password hashing implementation. Will only affect newly + %% created users. To recalculate hash for an existing user + %% it's necessary to update her password. + %% + %% {password_hashing_module, rabbit_password_hashing_sha256}, + %% %% Default User / VHost %% ==================== @@ -155,6 +167,11 @@ %% %% {frame_max, 131072}, + %% Set the max frame size the server will accept before connection + %% tuning occurs + %% + %% {initial_frame_max, 4096}, + %% Set the max permissible number of channels per connection. %% 0 means "no limit". %% @@ -165,10 +182,7 @@ %% See (http://www.erlang.org/doc/man/inet.html#setopts-2) for %% further documentation. %% - %% {tcp_listen_options, [binary, - %% {packet, raw}, - %% {reuseaddr, true}, - %% {backlog, 128}, + %% {tcp_listen_options, [{backlog, 128}, %% {nodelay, true}, %% {exit_on_close, false}]}, @@ -182,20 +196,50 @@ %% %% {vm_memory_high_watermark, 0.4}, + %% Alternatively, we can set a limit (in bytes) of RAM used by the node. + %% + %% {vm_memory_high_watermark, {absolute, 1073741824}}, + %% + %% Or you can set absolute value using memory units. + %% + %% {vm_memory_high_watermark, {absolute, "1024M"}}, + %% + %% Supported units suffixes: + %% + %% k, kiB: kibibytes (2^10 bytes) + %% M, MiB: mebibytes (2^20) + %% G, GiB: gibibytes (2^30) + %% kB: kilobytes (10^3) + %% MB: megabytes (10^6) + %% GB: gigabytes (10^9) + %% Fraction of the high watermark limit at which queues start to %% page message out to disc in order to free up memory. %% + %% Values greater than 0.9 can be dangerous and should be used carefully. + %% %% {vm_memory_high_watermark_paging_ratio, 0.5}, + %% Interval (in milliseconds) at which we perform the check of the memory + %% levels against the watermarks. 
+ %% + %% {memory_monitor_interval, 2500}, + %% Set disk free limit (in bytes). Once free disk space reaches this %% lower bound, a disk alarm will be set - see the documentation %% listed above for more details. %% %% {disk_free_limit, 50000000}, + %% + %% Or you can set it using memory units (same as in vm_memory_high_watermark) + %% {disk_free_limit, "50MB"}, + %% {disk_free_limit, "50000kB"}, + %% {disk_free_limit, "2GB"}, %% Alternatively, we can set a limit relative to total available RAM. %% - %% {disk_free_limit, {mem_relative, 1.0}}, + %% Values lower than 1.0 can be dangerous and should be used carefully. + %% {disk_free_limit, {mem_relative, 2.0}}, %% %% Misc/Advanced Options @@ -390,6 +434,12 @@ %% Listen for SSL connections on a specific port. %% {ssl_listeners, [61614]}, + %% Number of Erlang processes that will accept connections for the TCP + %% and SSL listeners. + %% + %% {num_tcp_acceptors, 10}, + %% {num_ssl_acceptors, 1}, + %% Additional SSL options %% Extract a name from the client's certificate when using SSL. @@ -461,12 +511,15 @@ %% {tcp_listeners, [1883]}, %% {ssl_listeners, []}, + %% Number of Erlang processes that will accept connections for the TCP + %% and SSL listeners. + %% + %% {num_tcp_acceptors, 10}, + %% {num_ssl_acceptors, 1}, + %% TCP/Socket options (as per the broker configuration). %% - %% {tcp_listen_options, [binary, - %% {packet, raw}, - %% {reuseaddr, true}, - %% {backlog, 128}, + %% {tcp_listen_options, [{backlog, 128}, %% {nodelay, true}]} ]}, diff --git a/rabbitmq-server/docs/rabbitmqctl.1.xml b/rabbitmq-server/docs/rabbitmqctl.1.xml index 92d4846..fe24a47 100644 --- a/rabbitmq-server/docs/rabbitmqctl.1.xml +++ b/rabbitmq-server/docs/rabbitmqctl.1.xml @@ -275,10 +275,9 @@ suffixed files. - When the target files do not exist they are created. - When no is specified, the empty - log files are simply created at the original location; - no rotation takes place. + When the target files do not exist they are created. 
When + no is specified, no rotation takes + place - log files are just re-opened. For example: rabbitmqctl rotate_logs .1 @@ -541,7 +540,7 @@ - sync_queue queue + sync_queue -p vhost queue @@ -570,7 +569,7 @@ - cancel_sync_queue queue + cancel_sync_queue -p vhost queue @@ -590,7 +589,7 @@ - purge_queue queue + purge_queue -p vhost queue @@ -721,6 +720,33 @@ + + + + authenticate_user username password + + + + + + username + The name of the user. + + + password + The password of the user. + + + For example: + rabbitmqctl authenticate_user tonyg verifyit + + This command instructs the RabbitMQ broker to authenticate the + user named tonyg with password + verifyit. + + + + set_user_tags username tag ... @@ -782,11 +808,11 @@ - add_vhost vhostpath + add_vhost vhost - vhostpath + vhost The name of the virtual host entry to create. @@ -803,11 +829,11 @@ - delete_vhost vhostpath + delete_vhost vhost - vhostpath + vhost The name of the virtual host entry to delete. @@ -864,11 +890,11 @@ - set_permissions -p vhostpath user conf write read + set_permissions -p vhost user conf write read - vhostpath + vhost The name of the virtual host to which to grant the user access, defaulting to /. @@ -904,11 +930,11 @@ - clear_permissions -p vhostpath username + clear_permissions -p vhost username - vhostpath + vhost The name of the virtual host to which to deny the user access, defaulting to /. @@ -930,11 +956,11 @@ - list_permissions -p vhostpath + list_permissions -p vhost - vhostpath + vhost The name of the virtual host for which to list the users that have been granted access to it, and their permissions. Defaults to /. @@ -994,7 +1020,7 @@ - set_parameter -p vhostpath component_name name value + set_parameter -p vhost component_name name value Sets a parameter. @@ -1030,7 +1056,7 @@ - clear_parameter -p vhostpath component_name key + clear_parameter -p vhost component_name key Clears a parameter. 
@@ -1058,7 +1084,7 @@ - list_parameters -p vhostpath + list_parameters -p vhost Lists all parameters for a virtual host. @@ -1083,7 +1109,7 @@ - set_policy -p vhostpath --priority priority --apply-to apply-to name pattern definition + set_policy -p vhost --priority priority --apply-to apply-to name pattern definition Sets a policy. @@ -1130,7 +1156,7 @@ - clear_policy -p vhostpath name + clear_policy -p vhost name Clears a policy. @@ -1151,7 +1177,7 @@ - list_policies -p vhostpath + list_policies -p vhost Lists all policies for a virtual host. @@ -1184,7 +1210,7 @@ - list_queues -p vhostpath queueinfoitem ... + list_queues -p vhost queueinfoitem ... Returns queue details. Queue details of the / virtual host @@ -1229,6 +1255,11 @@ which is the exclusive owner of the queue. Empty if the queue is non-exclusive. + + exclusive + True if queue is exclusive (i.e. has + owner_pid), false otherwise + exclusive_consumer_pid Id of the Erlang process representing the channel of the @@ -1289,6 +1320,10 @@ message_bytes_persistent Like message_bytes but counting only those messages which are persistent. + + head_message_timestamp + The timestamp property of the first message in the queue, if present. Timestamps of messages only appear when they are in the paged-in state. + disk_reads Total number of times messages have been read from disk by this queue since it started. @@ -1351,7 +1386,7 @@ - list_exchanges -p vhostpath exchangeinfoitem ... + list_exchanges -p vhost exchangeinfoitem ... Returns exchange details. Exchange details of the / virtual host @@ -1414,7 +1449,7 @@ - list_bindings -p vhostpath bindinginfoitem ... + list_bindings -p vhost bindinginfoitem ... Returns binding details. By default the bindings for @@ -1756,7 +1791,7 @@ - list_consumers -p vhostpath + list_consumers -p vhost List consumers, i.e. 
subscriptions to a queue's message @@ -1921,6 +1956,58 @@ + + set_vm_memory_high_watermark absolute memory_limit + + + + memory_limit + + The new memory limit at which flow control is + triggered, expressed in bytes as an integer number + greater than or equal to 0 or as a string with memory units + (e.g. 512M or 1G). Available units are: + k, kiB: kibibytes (2^10 bytes) + M, MiB: mebibytes (2^20) + G, GiB: gibibytes (2^30) + kB: kilobytes (10^3) + MB: megabytes (10^6) + GB: gigabytes (10^9) + + + + + + + set_disk_free_limit disk_limit + + + + disk_limit + + Lower bound limit as an integer in bytes or a string with memory units (see vm_memory_high_watermark), + e.g. 512M or 1G. Once free disk space reaches the limit, a disk alarm will be set. + + + + + + + set_disk_free_limit mem_relative fraction + + + + fraction + + Limit relative to the total amount available RAM + as a non-negative floating point number. + Values lower than 1.0 can be dangerous and + should be used carefully. + + + + + diff --git a/rabbitmq-server/erlang.mk b/rabbitmq-server/erlang.mk new file mode 100644 index 0000000..fc2d806 --- /dev/null +++ b/rabbitmq-server/erlang.mk @@ -0,0 +1,6617 @@ +# Copyright (c) 2013-2015, Loïc Hoguin +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +.PHONY: all app deps search rel docs install-docs check tests clean distclean help erlang-mk + +ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) + +ERLANG_MK_VERSION = 2.0.0-pre.2-16-gb52203c-dirty + +# Core configuration. + +PROJECT ?= $(notdir $(CURDIR)) +PROJECT := $(strip $(PROJECT)) + +PROJECT_VERSION ?= rolling + +# Verbosity. + +V ?= 0 + +verbose_0 = @ +verbose_2 = set -x; +verbose = $(verbose_$(V)) + +gen_verbose_0 = @echo " GEN " $@; +gen_verbose_2 = set -x; +gen_verbose = $(gen_verbose_$(V)) + +# Temporary files directory. + +ERLANG_MK_TMP ?= $(CURDIR)/.erlang.mk +export ERLANG_MK_TMP + +# "erl" command. + +ERL = erl +A0 -noinput -boot start_clean + +# Platform detection. + +ifeq ($(PLATFORM),) +UNAME_S := $(shell uname -s) + +ifeq ($(UNAME_S),Linux) +PLATFORM = linux +else ifeq ($(UNAME_S),Darwin) +PLATFORM = darwin +else ifeq ($(UNAME_S),SunOS) +PLATFORM = solaris +else ifeq ($(UNAME_S),GNU) +PLATFORM = gnu +else ifeq ($(UNAME_S),FreeBSD) +PLATFORM = freebsd +else ifeq ($(UNAME_S),NetBSD) +PLATFORM = netbsd +else ifeq ($(UNAME_S),OpenBSD) +PLATFORM = openbsd +else ifeq ($(UNAME_S),DragonFly) +PLATFORM = dragonfly +else ifeq ($(shell uname -o),Msys) +PLATFORM = msys2 +else +$(error Unable to detect platform. Please open a ticket with the output of uname -a.) +endif + +export PLATFORM +endif + +# Core targets. + +all:: deps app rel + +# Noop to avoid a Make warning when there's nothing to do. +rel:: + $(verbose) : + +check:: clean app tests + +clean:: clean-crashdump + +clean-crashdump: +ifneq ($(wildcard erl_crash.dump),) + $(gen_verbose) rm -f erl_crash.dump +endif + +distclean:: clean distclean-tmp + +distclean-tmp: + $(gen_verbose) rm -rf $(ERLANG_MK_TMP) + +help:: + $(verbose) printf "%s\n" \ + "erlang.mk (version $(ERLANG_MK_VERSION)) is distributed under the terms of the ISC License." \ + "Copyright (c) 2013-2015 Loïc Hoguin " \ + "" \ + "Usage: [V=1] $(MAKE) [target]..." 
\ + "" \ + "Core targets:" \ + " all Run deps, app and rel targets in that order" \ + " app Compile the project" \ + " deps Fetch dependencies (if needed) and compile them" \ + " fetch-deps Fetch dependencies (if needed) without compiling them" \ + " list-deps Fetch dependencies (if needed) and list them" \ + " search q=... Search for a package in the built-in index" \ + " rel Build a release for this project, if applicable" \ + " docs Build the documentation for this project" \ + " install-docs Install the man pages for this project" \ + " check Compile and run all tests and analysis for this project" \ + " tests Run the tests for this project" \ + " clean Delete temporary and output files from most targets" \ + " distclean Delete all temporary and output files" \ + " help Display this help and exit" \ + " erlang-mk Update erlang.mk to the latest version" + +# Core functions. + +empty := +space := $(empty) $(empty) +tab := $(empty) $(empty) +comma := , + +define newline + + +endef + +define comma_list +$(subst $(space),$(comma),$(strip $(1))) +endef + +# Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. +define erlang +$(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk +endef + +ifeq ($(PLATFORM),msys2) +core_native_path = $(subst \,\\\\,$(shell cygpath -w $1)) +else +core_native_path = $1 +endif + +ifeq ($(shell which wget 2>/dev/null | wc -l), 1) +define core_http_get + wget --no-check-certificate -O $(1) $(2)|| rm $(1) +endef +else +define core_http_get.erl + ssl:start(), + inets:start(), + case httpc:request(get, {"$(2)", []}, [{autoredirect, true}], []) of + {ok, {{_, 200, _}, _, Body}} -> + case file:write_file("$(1)", Body) of + ok -> ok; + {error, R1} -> halt(R1) + end; + {error, R2} -> + halt(R2) + end, + halt(0). 
+endef + +define core_http_get + $(call erlang,$(call core_http_get.erl,$(call core_native_path,$1),$2)) +endef +endif + +core_eq = $(and $(findstring $(1),$(2)),$(findstring $(2),$(1))) + +core_find = $(if $(wildcard $1),$(shell find $(1:%/=%) -type f -name $(subst *,\*,$2))) + +core_lc = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$(1))))))))))))))))))))))))))) + +core_ls = $(filter-out $(1),$(shell echo $(1))) + +# @todo Use a solution that does not require using perl. +core_relpath = $(shell perl -e 'use File::Spec; print File::Spec->abs2rel(@ARGV) . "\n"' $1 $2) + +# Automated update. + +ERLANG_MK_REPO ?= https://github.com/ninenines/erlang.mk +ERLANG_MK_COMMIT ?= +ERLANG_MK_BUILD_CONFIG ?= build.config +ERLANG_MK_BUILD_DIR ?= .erlang.mk.build + +erlang-mk: + git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) +ifdef ERLANG_MK_COMMIT + cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) +endif + if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi + $(MAKE) -C $(ERLANG_MK_BUILD_DIR) + cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk + rm -rf $(ERLANG_MK_BUILD_DIR) + +# The erlang.mk package index is bundled in the default erlang.mk build. +# Search for the string "copyright" to skip to the rest of the code. 
+ +PACKAGES += aberth +pkg_aberth_name = aberth +pkg_aberth_description = Generic BERT-RPC server in Erlang +pkg_aberth_homepage = https://github.com/a13x/aberth +pkg_aberth_fetch = git +pkg_aberth_repo = https://github.com/a13x/aberth +pkg_aberth_commit = master + +PACKAGES += active +pkg_active_name = active +pkg_active_description = Active development for Erlang: rebuild and reload source/binary files while the VM is running +pkg_active_homepage = https://github.com/proger/active +pkg_active_fetch = git +pkg_active_repo = https://github.com/proger/active +pkg_active_commit = master + +PACKAGES += actordb_core +pkg_actordb_core_name = actordb_core +pkg_actordb_core_description = ActorDB main source +pkg_actordb_core_homepage = http://www.actordb.com/ +pkg_actordb_core_fetch = git +pkg_actordb_core_repo = https://github.com/biokoda/actordb_core +pkg_actordb_core_commit = master + +PACKAGES += actordb_thrift +pkg_actordb_thrift_name = actordb_thrift +pkg_actordb_thrift_description = Thrift API for ActorDB +pkg_actordb_thrift_homepage = http://www.actordb.com/ +pkg_actordb_thrift_fetch = git +pkg_actordb_thrift_repo = https://github.com/biokoda/actordb_thrift +pkg_actordb_thrift_commit = master + +PACKAGES += aleppo +pkg_aleppo_name = aleppo +pkg_aleppo_description = Alternative Erlang Pre-Processor +pkg_aleppo_homepage = https://github.com/ErlyORM/aleppo +pkg_aleppo_fetch = git +pkg_aleppo_repo = https://github.com/ErlyORM/aleppo +pkg_aleppo_commit = master + +PACKAGES += alog +pkg_alog_name = alog +pkg_alog_description = Simply the best logging framework for Erlang +pkg_alog_homepage = https://github.com/siberian-fast-food/alogger +pkg_alog_fetch = git +pkg_alog_repo = https://github.com/siberian-fast-food/alogger +pkg_alog_commit = master + +PACKAGES += amqp_client +pkg_amqp_client_name = amqp_client +pkg_amqp_client_description = RabbitMQ Erlang AMQP client +pkg_amqp_client_homepage = https://www.rabbitmq.com/erlang-client-user-guide.html +pkg_amqp_client_fetch 
= git +pkg_amqp_client_repo = https://github.com/rabbitmq/rabbitmq-erlang-client.git +pkg_amqp_client_commit = master + +PACKAGES += annotations +pkg_annotations_name = annotations +pkg_annotations_description = Simple code instrumentation utilities +pkg_annotations_homepage = https://github.com/hyperthunk/annotations +pkg_annotations_fetch = git +pkg_annotations_repo = https://github.com/hyperthunk/annotations +pkg_annotations_commit = master + +PACKAGES += antidote +pkg_antidote_name = antidote +pkg_antidote_description = Large-scale computation without synchronisation +pkg_antidote_homepage = https://syncfree.lip6.fr/ +pkg_antidote_fetch = git +pkg_antidote_repo = https://github.com/SyncFree/antidote +pkg_antidote_commit = master + +PACKAGES += apns +pkg_apns_name = apns +pkg_apns_description = Apple Push Notification Server for Erlang +pkg_apns_homepage = http://inaka.github.com/apns4erl +pkg_apns_fetch = git +pkg_apns_repo = https://github.com/inaka/apns4erl +pkg_apns_commit = 1.0.4 + +PACKAGES += azdht +pkg_azdht_name = azdht +pkg_azdht_description = Azureus Distributed Hash Table (DHT) in Erlang +pkg_azdht_homepage = https://github.com/arcusfelis/azdht +pkg_azdht_fetch = git +pkg_azdht_repo = https://github.com/arcusfelis/azdht +pkg_azdht_commit = master + +PACKAGES += backoff +pkg_backoff_name = backoff +pkg_backoff_description = Simple exponential backoffs in Erlang +pkg_backoff_homepage = https://github.com/ferd/backoff +pkg_backoff_fetch = git +pkg_backoff_repo = https://github.com/ferd/backoff +pkg_backoff_commit = master + +PACKAGES += barrel_tcp +pkg_barrel_tcp_name = barrel_tcp +pkg_barrel_tcp_description = barrel is a generic TCP acceptor pool with low latency in Erlang. 
+pkg_barrel_tcp_homepage = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_fetch = git +pkg_barrel_tcp_repo = https://github.com/benoitc-attic/barrel_tcp +pkg_barrel_tcp_commit = master + +PACKAGES += basho_bench +pkg_basho_bench_name = basho_bench +pkg_basho_bench_description = A load-generation and testing tool for basically whatever you can write a returning Erlang function for. +pkg_basho_bench_homepage = https://github.com/basho/basho_bench +pkg_basho_bench_fetch = git +pkg_basho_bench_repo = https://github.com/basho/basho_bench +pkg_basho_bench_commit = master + +PACKAGES += bcrypt +pkg_bcrypt_name = bcrypt +pkg_bcrypt_description = Bcrypt Erlang / C library +pkg_bcrypt_homepage = https://github.com/riverrun/branglecrypt +pkg_bcrypt_fetch = git +pkg_bcrypt_repo = https://github.com/riverrun/branglecrypt +pkg_bcrypt_commit = master + +PACKAGES += beam +pkg_beam_name = beam +pkg_beam_description = BEAM emulator written in Erlang +pkg_beam_homepage = https://github.com/tonyrog/beam +pkg_beam_fetch = git +pkg_beam_repo = https://github.com/tonyrog/beam +pkg_beam_commit = master + +PACKAGES += beanstalk +pkg_beanstalk_name = beanstalk +pkg_beanstalk_description = An Erlang client for beanstalkd +pkg_beanstalk_homepage = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_fetch = git +pkg_beanstalk_repo = https://github.com/tim/erlang-beanstalk +pkg_beanstalk_commit = master + +PACKAGES += bear +pkg_bear_name = bear +pkg_bear_description = a set of statistics functions for erlang +pkg_bear_homepage = https://github.com/boundary/bear +pkg_bear_fetch = git +pkg_bear_repo = https://github.com/boundary/bear +pkg_bear_commit = master + +PACKAGES += bertconf +pkg_bertconf_name = bertconf +pkg_bertconf_description = Make ETS tables out of statc BERT files that are auto-reloaded +pkg_bertconf_homepage = https://github.com/ferd/bertconf +pkg_bertconf_fetch = git +pkg_bertconf_repo = https://github.com/ferd/bertconf +pkg_bertconf_commit = master + +PACKAGES += 
bifrost +pkg_bifrost_name = bifrost +pkg_bifrost_description = Erlang FTP Server Framework +pkg_bifrost_homepage = https://github.com/thorstadt/bifrost +pkg_bifrost_fetch = git +pkg_bifrost_repo = https://github.com/thorstadt/bifrost +pkg_bifrost_commit = master + +PACKAGES += binpp +pkg_binpp_name = binpp +pkg_binpp_description = Erlang Binary Pretty Printer +pkg_binpp_homepage = https://github.com/jtendo/binpp +pkg_binpp_fetch = git +pkg_binpp_repo = https://github.com/jtendo/binpp +pkg_binpp_commit = master + +PACKAGES += bisect +pkg_bisect_name = bisect +pkg_bisect_description = Ordered fixed-size binary dictionary in Erlang +pkg_bisect_homepage = https://github.com/knutin/bisect +pkg_bisect_fetch = git +pkg_bisect_repo = https://github.com/knutin/bisect +pkg_bisect_commit = master + +PACKAGES += bitcask +pkg_bitcask_name = bitcask +pkg_bitcask_description = because you need another a key/value storage engine +pkg_bitcask_homepage = https://github.com/basho/bitcask +pkg_bitcask_fetch = git +pkg_bitcask_repo = https://github.com/basho/bitcask +pkg_bitcask_commit = master + +PACKAGES += bitstore +pkg_bitstore_name = bitstore +pkg_bitstore_description = A document based ontology development environment +pkg_bitstore_homepage = https://github.com/bdionne/bitstore +pkg_bitstore_fetch = git +pkg_bitstore_repo = https://github.com/bdionne/bitstore +pkg_bitstore_commit = master + +PACKAGES += bootstrap +pkg_bootstrap_name = bootstrap +pkg_bootstrap_description = A simple, yet powerful Erlang cluster bootstrapping application. 
+pkg_bootstrap_homepage = https://github.com/schlagert/bootstrap +pkg_bootstrap_fetch = git +pkg_bootstrap_repo = https://github.com/schlagert/bootstrap +pkg_bootstrap_commit = master + +PACKAGES += boss +pkg_boss_name = boss +pkg_boss_description = Erlang web MVC, now featuring Comet +pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_fetch = git +pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss +pkg_boss_commit = master + +PACKAGES += boss_db +pkg_boss_db_name = boss_db +pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang +pkg_boss_db_homepage = https://github.com/ErlyORM/boss_db +pkg_boss_db_fetch = git +pkg_boss_db_repo = https://github.com/ErlyORM/boss_db +pkg_boss_db_commit = master + +PACKAGES += bson +pkg_bson_name = bson +pkg_bson_description = BSON documents in Erlang, see bsonspec.org +pkg_bson_homepage = https://github.com/comtihon/bson-erlang +pkg_bson_fetch = git +pkg_bson_repo = https://github.com/comtihon/bson-erlang +pkg_bson_commit = master + +PACKAGES += bullet +pkg_bullet_name = bullet +pkg_bullet_description = Simple, reliable, efficient streaming for Cowboy. 
+pkg_bullet_homepage = http://ninenines.eu +pkg_bullet_fetch = git +pkg_bullet_repo = https://github.com/ninenines/bullet +pkg_bullet_commit = master + +PACKAGES += cache +pkg_cache_name = cache +pkg_cache_description = Erlang in-memory cache +pkg_cache_homepage = https://github.com/fogfish/cache +pkg_cache_fetch = git +pkg_cache_repo = https://github.com/fogfish/cache +pkg_cache_commit = master + +PACKAGES += cake +pkg_cake_name = cake +pkg_cake_description = Really simple terminal colorization +pkg_cake_homepage = https://github.com/darach/cake-erl +pkg_cake_fetch = git +pkg_cake_repo = https://github.com/darach/cake-erl +pkg_cake_commit = v0.1.2 + +PACKAGES += carotene +pkg_carotene_name = carotene +pkg_carotene_description = Real-time server +pkg_carotene_homepage = https://github.com/carotene/carotene +pkg_carotene_fetch = git +pkg_carotene_repo = https://github.com/carotene/carotene +pkg_carotene_commit = master + +PACKAGES += cberl +pkg_cberl_name = cberl +pkg_cberl_description = NIF based Erlang bindings for Couchbase +pkg_cberl_homepage = https://github.com/chitika/cberl +pkg_cberl_fetch = git +pkg_cberl_repo = https://github.com/chitika/cberl +pkg_cberl_commit = master + +PACKAGES += cecho +pkg_cecho_name = cecho +pkg_cecho_description = An ncurses library for Erlang +pkg_cecho_homepage = https://github.com/mazenharake/cecho +pkg_cecho_fetch = git +pkg_cecho_repo = https://github.com/mazenharake/cecho +pkg_cecho_commit = master + +PACKAGES += cferl +pkg_cferl_name = cferl +pkg_cferl_description = Rackspace / Open Stack Cloud Files Erlang Client +pkg_cferl_homepage = https://github.com/ddossot/cferl +pkg_cferl_fetch = git +pkg_cferl_repo = https://github.com/ddossot/cferl +pkg_cferl_commit = master + +PACKAGES += chaos_monkey +pkg_chaos_monkey_name = chaos_monkey +pkg_chaos_monkey_description = This is The CHAOS MONKEY. It will kill your processes. 
+pkg_chaos_monkey_homepage = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_fetch = git +pkg_chaos_monkey_repo = https://github.com/dLuna/chaos_monkey +pkg_chaos_monkey_commit = master + +PACKAGES += check_node +pkg_check_node_name = check_node +pkg_check_node_description = Nagios Scripts for monitoring Riak +pkg_check_node_homepage = https://github.com/basho-labs/riak_nagios +pkg_check_node_fetch = git +pkg_check_node_repo = https://github.com/basho-labs/riak_nagios +pkg_check_node_commit = master + +PACKAGES += chronos +pkg_chronos_name = chronos +pkg_chronos_description = Timer module for Erlang that makes it easy to abstact time out of the tests. +pkg_chronos_homepage = https://github.com/lehoff/chronos +pkg_chronos_fetch = git +pkg_chronos_repo = https://github.com/lehoff/chronos +pkg_chronos_commit = master + +PACKAGES += cl +pkg_cl_name = cl +pkg_cl_description = OpenCL binding for Erlang +pkg_cl_homepage = https://github.com/tonyrog/cl +pkg_cl_fetch = git +pkg_cl_repo = https://github.com/tonyrog/cl +pkg_cl_commit = master + +PACKAGES += classifier +pkg_classifier_name = classifier +pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier +pkg_classifier_homepage = https://github.com/inaka/classifier +pkg_classifier_fetch = git +pkg_classifier_repo = https://github.com/inaka/classifier +pkg_classifier_commit = master + +PACKAGES += clique +pkg_clique_name = clique +pkg_clique_description = CLI Framework for Erlang +pkg_clique_homepage = https://github.com/basho/clique +pkg_clique_fetch = git +pkg_clique_repo = https://github.com/basho/clique +pkg_clique_commit = develop + +PACKAGES += cloudi_core +pkg_cloudi_core_name = cloudi_core +pkg_cloudi_core_description = CloudI internal service runtime +pkg_cloudi_core_homepage = http://cloudi.org/ +pkg_cloudi_core_fetch = git +pkg_cloudi_core_repo = https://github.com/CloudI/cloudi_core +pkg_cloudi_core_commit = master + +PACKAGES += cloudi_service_api_requests 
+pkg_cloudi_service_api_requests_name = cloudi_service_api_requests +pkg_cloudi_service_api_requests_description = CloudI Service API requests (JSON-RPC/Erlang-term support) +pkg_cloudi_service_api_requests_homepage = http://cloudi.org/ +pkg_cloudi_service_api_requests_fetch = git +pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests +pkg_cloudi_service_api_requests_commit = master + +PACKAGES += cloudi_service_db +pkg_cloudi_service_db_name = cloudi_service_db +pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) +pkg_cloudi_service_db_homepage = http://cloudi.org/ +pkg_cloudi_service_db_fetch = git +pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db +pkg_cloudi_service_db_commit = master + +PACKAGES += cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service +pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_fetch = git +pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra +pkg_cloudi_service_db_cassandra_commit = master + +PACKAGES += cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service +pkg_cloudi_service_db_cassandra_cql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_cassandra_cql_fetch = git +pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql +pkg_cloudi_service_db_cassandra_cql_commit = master + +PACKAGES += cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service +pkg_cloudi_service_db_couchdb_homepage = http://cloudi.org/ +pkg_cloudi_service_db_couchdb_fetch = git 
+pkg_cloudi_service_db_couchdb_repo = https://github.com/CloudI/cloudi_service_db_couchdb +pkg_cloudi_service_db_couchdb_commit = master + +PACKAGES += cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_name = cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_description = elasticsearch CloudI Service +pkg_cloudi_service_db_elasticsearch_homepage = http://cloudi.org/ +pkg_cloudi_service_db_elasticsearch_fetch = git +pkg_cloudi_service_db_elasticsearch_repo = https://github.com/CloudI/cloudi_service_db_elasticsearch +pkg_cloudi_service_db_elasticsearch_commit = master + +PACKAGES += cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_name = cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_description = memcached CloudI Service +pkg_cloudi_service_db_memcached_homepage = http://cloudi.org/ +pkg_cloudi_service_db_memcached_fetch = git +pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached +pkg_cloudi_service_db_memcached_commit = master + +PACKAGES += cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_description = MySQL CloudI Service +pkg_cloudi_service_db_mysql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_mysql_fetch = git +pkg_cloudi_service_db_mysql_repo = https://github.com/CloudI/cloudi_service_db_mysql +pkg_cloudi_service_db_mysql_commit = master + +PACKAGES += cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_name = cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_description = PostgreSQL CloudI Service +pkg_cloudi_service_db_pgsql_homepage = http://cloudi.org/ +pkg_cloudi_service_db_pgsql_fetch = git +pkg_cloudi_service_db_pgsql_repo = https://github.com/CloudI/cloudi_service_db_pgsql +pkg_cloudi_service_db_pgsql_commit = master + +PACKAGES += cloudi_service_db_riak +pkg_cloudi_service_db_riak_name = cloudi_service_db_riak +pkg_cloudi_service_db_riak_description = Riak CloudI Service 
+pkg_cloudi_service_db_riak_homepage = http://cloudi.org/ +pkg_cloudi_service_db_riak_fetch = git +pkg_cloudi_service_db_riak_repo = https://github.com/CloudI/cloudi_service_db_riak +pkg_cloudi_service_db_riak_commit = master + +PACKAGES += cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_name = cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_description = Tokyo Tyrant CloudI Service +pkg_cloudi_service_db_tokyotyrant_homepage = http://cloudi.org/ +pkg_cloudi_service_db_tokyotyrant_fetch = git +pkg_cloudi_service_db_tokyotyrant_repo = https://github.com/CloudI/cloudi_service_db_tokyotyrant +pkg_cloudi_service_db_tokyotyrant_commit = master + +PACKAGES += cloudi_service_filesystem +pkg_cloudi_service_filesystem_name = cloudi_service_filesystem +pkg_cloudi_service_filesystem_description = Filesystem CloudI Service +pkg_cloudi_service_filesystem_homepage = http://cloudi.org/ +pkg_cloudi_service_filesystem_fetch = git +pkg_cloudi_service_filesystem_repo = https://github.com/CloudI/cloudi_service_filesystem +pkg_cloudi_service_filesystem_commit = master + +PACKAGES += cloudi_service_http_client +pkg_cloudi_service_http_client_name = cloudi_service_http_client +pkg_cloudi_service_http_client_description = HTTP client CloudI Service +pkg_cloudi_service_http_client_homepage = http://cloudi.org/ +pkg_cloudi_service_http_client_fetch = git +pkg_cloudi_service_http_client_repo = https://github.com/CloudI/cloudi_service_http_client +pkg_cloudi_service_http_client_commit = master + +PACKAGES += cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_name = cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_description = cowboy HTTP/HTTPS CloudI Service +pkg_cloudi_service_http_cowboy_homepage = http://cloudi.org/ +pkg_cloudi_service_http_cowboy_fetch = git +pkg_cloudi_service_http_cowboy_repo = https://github.com/CloudI/cloudi_service_http_cowboy +pkg_cloudi_service_http_cowboy_commit = master + +PACKAGES += cloudi_service_http_elli 
+pkg_cloudi_service_http_elli_name = cloudi_service_http_elli +pkg_cloudi_service_http_elli_description = elli HTTP CloudI Service +pkg_cloudi_service_http_elli_homepage = http://cloudi.org/ +pkg_cloudi_service_http_elli_fetch = git +pkg_cloudi_service_http_elli_repo = https://github.com/CloudI/cloudi_service_http_elli +pkg_cloudi_service_http_elli_commit = master + +PACKAGES += cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_name = cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_description = Map/Reduce CloudI Service +pkg_cloudi_service_map_reduce_homepage = http://cloudi.org/ +pkg_cloudi_service_map_reduce_fetch = git +pkg_cloudi_service_map_reduce_repo = https://github.com/CloudI/cloudi_service_map_reduce +pkg_cloudi_service_map_reduce_commit = master + +PACKAGES += cloudi_service_oauth1 +pkg_cloudi_service_oauth1_name = cloudi_service_oauth1 +pkg_cloudi_service_oauth1_description = OAuth v1.0 CloudI Service +pkg_cloudi_service_oauth1_homepage = http://cloudi.org/ +pkg_cloudi_service_oauth1_fetch = git +pkg_cloudi_service_oauth1_repo = https://github.com/CloudI/cloudi_service_oauth1 +pkg_cloudi_service_oauth1_commit = master + +PACKAGES += cloudi_service_queue +pkg_cloudi_service_queue_name = cloudi_service_queue +pkg_cloudi_service_queue_description = Persistent Queue Service +pkg_cloudi_service_queue_homepage = http://cloudi.org/ +pkg_cloudi_service_queue_fetch = git +pkg_cloudi_service_queue_repo = https://github.com/CloudI/cloudi_service_queue +pkg_cloudi_service_queue_commit = master + +PACKAGES += cloudi_service_quorum +pkg_cloudi_service_quorum_name = cloudi_service_quorum +pkg_cloudi_service_quorum_description = CloudI Quorum Service +pkg_cloudi_service_quorum_homepage = http://cloudi.org/ +pkg_cloudi_service_quorum_fetch = git +pkg_cloudi_service_quorum_repo = https://github.com/CloudI/cloudi_service_quorum +pkg_cloudi_service_quorum_commit = master + +PACKAGES += cloudi_service_router +pkg_cloudi_service_router_name = 
cloudi_service_router +pkg_cloudi_service_router_description = CloudI Router Service +pkg_cloudi_service_router_homepage = http://cloudi.org/ +pkg_cloudi_service_router_fetch = git +pkg_cloudi_service_router_repo = https://github.com/CloudI/cloudi_service_router +pkg_cloudi_service_router_commit = master + +PACKAGES += cloudi_service_tcp +pkg_cloudi_service_tcp_name = cloudi_service_tcp +pkg_cloudi_service_tcp_description = TCP CloudI Service +pkg_cloudi_service_tcp_homepage = http://cloudi.org/ +pkg_cloudi_service_tcp_fetch = git +pkg_cloudi_service_tcp_repo = https://github.com/CloudI/cloudi_service_tcp +pkg_cloudi_service_tcp_commit = master + +PACKAGES += cloudi_service_timers +pkg_cloudi_service_timers_name = cloudi_service_timers +pkg_cloudi_service_timers_description = Timers CloudI Service +pkg_cloudi_service_timers_homepage = http://cloudi.org/ +pkg_cloudi_service_timers_fetch = git +pkg_cloudi_service_timers_repo = https://github.com/CloudI/cloudi_service_timers +pkg_cloudi_service_timers_commit = master + +PACKAGES += cloudi_service_udp +pkg_cloudi_service_udp_name = cloudi_service_udp +pkg_cloudi_service_udp_description = UDP CloudI Service +pkg_cloudi_service_udp_homepage = http://cloudi.org/ +pkg_cloudi_service_udp_fetch = git +pkg_cloudi_service_udp_repo = https://github.com/CloudI/cloudi_service_udp +pkg_cloudi_service_udp_commit = master + +PACKAGES += cloudi_service_validate +pkg_cloudi_service_validate_name = cloudi_service_validate +pkg_cloudi_service_validate_description = CloudI Validate Service +pkg_cloudi_service_validate_homepage = http://cloudi.org/ +pkg_cloudi_service_validate_fetch = git +pkg_cloudi_service_validate_repo = https://github.com/CloudI/cloudi_service_validate +pkg_cloudi_service_validate_commit = master + +PACKAGES += cloudi_service_zeromq +pkg_cloudi_service_zeromq_name = cloudi_service_zeromq +pkg_cloudi_service_zeromq_description = ZeroMQ CloudI Service +pkg_cloudi_service_zeromq_homepage = http://cloudi.org/ 
+pkg_cloudi_service_zeromq_fetch = git +pkg_cloudi_service_zeromq_repo = https://github.com/CloudI/cloudi_service_zeromq +pkg_cloudi_service_zeromq_commit = master + +PACKAGES += cluster_info +pkg_cluster_info_name = cluster_info +pkg_cluster_info_description = Fork of Hibari's nifty cluster_info OTP app +pkg_cluster_info_homepage = https://github.com/basho/cluster_info +pkg_cluster_info_fetch = git +pkg_cluster_info_repo = https://github.com/basho/cluster_info +pkg_cluster_info_commit = master + +PACKAGES += color +pkg_color_name = color +pkg_color_description = ANSI colors for your Erlang +pkg_color_homepage = https://github.com/julianduque/erlang-color +pkg_color_fetch = git +pkg_color_repo = https://github.com/julianduque/erlang-color +pkg_color_commit = master + +PACKAGES += confetti +pkg_confetti_name = confetti +pkg_confetti_description = Erlang configuration provider / application:get_env/2 on steroids +pkg_confetti_homepage = https://github.com/jtendo/confetti +pkg_confetti_fetch = git +pkg_confetti_repo = https://github.com/jtendo/confetti +pkg_confetti_commit = master + +PACKAGES += couchbeam +pkg_couchbeam_name = couchbeam +pkg_couchbeam_description = Apache CouchDB client in Erlang +pkg_couchbeam_homepage = https://github.com/benoitc/couchbeam +pkg_couchbeam_fetch = git +pkg_couchbeam_repo = https://github.com/benoitc/couchbeam +pkg_couchbeam_commit = master + +PACKAGES += covertool +pkg_covertool_name = covertool +pkg_covertool_description = Tool to convert Erlang cover data files into Cobertura XML reports +pkg_covertool_homepage = https://github.com/idubrov/covertool +pkg_covertool_fetch = git +pkg_covertool_repo = https://github.com/idubrov/covertool +pkg_covertool_commit = master + +PACKAGES += cowboy +pkg_cowboy_name = cowboy +pkg_cowboy_description = Small, fast and modular HTTP server. 
+pkg_cowboy_homepage = http://ninenines.eu +pkg_cowboy_fetch = git +pkg_cowboy_repo = https://github.com/ninenines/cowboy +pkg_cowboy_commit = 1.0.1 + +PACKAGES += cowdb +pkg_cowdb_name = cowdb +pkg_cowdb_description = Pure Key/Value database library for Erlang Applications +pkg_cowdb_homepage = https://github.com/refuge/cowdb +pkg_cowdb_fetch = git +pkg_cowdb_repo = https://github.com/refuge/cowdb +pkg_cowdb_commit = master + +PACKAGES += cowlib +pkg_cowlib_name = cowlib +pkg_cowlib_description = Support library for manipulating Web protocols. +pkg_cowlib_homepage = http://ninenines.eu +pkg_cowlib_fetch = git +pkg_cowlib_repo = https://github.com/ninenines/cowlib +pkg_cowlib_commit = 1.0.1 + +PACKAGES += cpg +pkg_cpg_name = cpg +pkg_cpg_description = CloudI Process Groups +pkg_cpg_homepage = https://github.com/okeuday/cpg +pkg_cpg_fetch = git +pkg_cpg_repo = https://github.com/okeuday/cpg +pkg_cpg_commit = master + +PACKAGES += cqerl +pkg_cqerl_name = cqerl +pkg_cqerl_description = Native Erlang CQL client for Cassandra +pkg_cqerl_homepage = https://matehat.github.io/cqerl/ +pkg_cqerl_fetch = git +pkg_cqerl_repo = https://github.com/matehat/cqerl +pkg_cqerl_commit = master + +PACKAGES += cr +pkg_cr_name = cr +pkg_cr_description = Chain Replication +pkg_cr_homepage = https://synrc.com/apps/cr/doc/cr.htm +pkg_cr_fetch = git +pkg_cr_repo = https://github.com/spawnproc/cr +pkg_cr_commit = master + +PACKAGES += cuttlefish +pkg_cuttlefish_name = cuttlefish +pkg_cuttlefish_description = never lose your childlike sense of wonder baby cuttlefish, promise me? +pkg_cuttlefish_homepage = https://github.com/basho/cuttlefish +pkg_cuttlefish_fetch = git +pkg_cuttlefish_repo = https://github.com/basho/cuttlefish +pkg_cuttlefish_commit = master + +PACKAGES += damocles +pkg_damocles_name = damocles +pkg_damocles_description = Erlang library for generating adversarial network conditions for QAing distributed applications/systems on a single Linux box. 
+pkg_damocles_homepage = https://github.com/lostcolony/damocles +pkg_damocles_fetch = git +pkg_damocles_repo = https://github.com/lostcolony/damocles +pkg_damocles_commit = master + +PACKAGES += debbie +pkg_debbie_name = debbie +pkg_debbie_description = .DEB Built In Erlang +pkg_debbie_homepage = https://github.com/crownedgrouse/debbie +pkg_debbie_fetch = git +pkg_debbie_repo = https://github.com/crownedgrouse/debbie +pkg_debbie_commit = master + +PACKAGES += decimal +pkg_decimal_name = decimal +pkg_decimal_description = An Erlang decimal arithmetic library +pkg_decimal_homepage = https://github.com/tim/erlang-decimal +pkg_decimal_fetch = git +pkg_decimal_repo = https://github.com/tim/erlang-decimal +pkg_decimal_commit = master + +PACKAGES += detergent +pkg_detergent_name = detergent +pkg_detergent_description = An emulsifying Erlang SOAP library +pkg_detergent_homepage = https://github.com/devinus/detergent +pkg_detergent_fetch = git +pkg_detergent_repo = https://github.com/devinus/detergent +pkg_detergent_commit = master + +PACKAGES += detest +pkg_detest_name = detest +pkg_detest_description = Tool for running tests on a cluster of erlang nodes +pkg_detest_homepage = https://github.com/biokoda/detest +pkg_detest_fetch = git +pkg_detest_repo = https://github.com/biokoda/detest +pkg_detest_commit = master + +PACKAGES += dh_date +pkg_dh_date_name = dh_date +pkg_dh_date_description = Date formatting / parsing library for erlang +pkg_dh_date_homepage = https://github.com/daleharvey/dh_date +pkg_dh_date_fetch = git +pkg_dh_date_repo = https://github.com/daleharvey/dh_date +pkg_dh_date_commit = master + +PACKAGES += dhtcrawler +pkg_dhtcrawler_name = dhtcrawler +pkg_dhtcrawler_description = dhtcrawler is a DHT crawler written in erlang. It can join a DHT network and crawl many P2P torrents. 
+pkg_dhtcrawler_homepage = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_fetch = git +pkg_dhtcrawler_repo = https://github.com/kevinlynx/dhtcrawler +pkg_dhtcrawler_commit = master + +PACKAGES += dirbusterl +pkg_dirbusterl_name = dirbusterl +pkg_dirbusterl_description = DirBuster successor in Erlang +pkg_dirbusterl_homepage = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_fetch = git +pkg_dirbusterl_repo = https://github.com/silentsignal/DirBustErl +pkg_dirbusterl_commit = master + +PACKAGES += dispcount +pkg_dispcount_name = dispcount +pkg_dispcount_description = Erlang task dispatcher based on ETS counters. +pkg_dispcount_homepage = https://github.com/ferd/dispcount +pkg_dispcount_fetch = git +pkg_dispcount_repo = https://github.com/ferd/dispcount +pkg_dispcount_commit = master + +PACKAGES += dlhttpc +pkg_dlhttpc_name = dlhttpc +pkg_dlhttpc_description = dispcount-based lhttpc fork for massive amounts of requests to limited endpoints +pkg_dlhttpc_homepage = https://github.com/ferd/dlhttpc +pkg_dlhttpc_fetch = git +pkg_dlhttpc_repo = https://github.com/ferd/dlhttpc +pkg_dlhttpc_commit = master + +PACKAGES += dns +pkg_dns_name = dns +pkg_dns_description = Erlang DNS library +pkg_dns_homepage = https://github.com/aetrion/dns_erlang +pkg_dns_fetch = git +pkg_dns_repo = https://github.com/aetrion/dns_erlang +pkg_dns_commit = master + +PACKAGES += dnssd +pkg_dnssd_name = dnssd +pkg_dnssd_description = Erlang interface to Apple's Bonjour DNS Service Discovery implementation +pkg_dnssd_homepage = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_fetch = git +pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang +pkg_dnssd_commit = master + +PACKAGES += dtl +pkg_dtl_name = dtl +pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. 
+pkg_dtl_homepage = https://github.com/oinksoft/dtl +pkg_dtl_fetch = git +pkg_dtl_repo = https://github.com/oinksoft/dtl +pkg_dtl_commit = master + +PACKAGES += dynamic_compile +pkg_dynamic_compile_name = dynamic_compile +pkg_dynamic_compile_description = compile and load erlang modules from string input +pkg_dynamic_compile_homepage = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_fetch = git +pkg_dynamic_compile_repo = https://github.com/jkvor/dynamic_compile +pkg_dynamic_compile_commit = master + +PACKAGES += e2 +pkg_e2_name = e2 +pkg_e2_description = Library to simplify writing correct OTP applications. +pkg_e2_homepage = http://e2project.org +pkg_e2_fetch = git +pkg_e2_repo = https://github.com/gar1t/e2 +pkg_e2_commit = master + +PACKAGES += eamf +pkg_eamf_name = eamf +pkg_eamf_description = eAMF provides Action Message Format (AMF) support for Erlang +pkg_eamf_homepage = https://github.com/mrinalwadhwa/eamf +pkg_eamf_fetch = git +pkg_eamf_repo = https://github.com/mrinalwadhwa/eamf +pkg_eamf_commit = master + +PACKAGES += eavro +pkg_eavro_name = eavro +pkg_eavro_description = Apache Avro encoder/decoder +pkg_eavro_homepage = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_fetch = git +pkg_eavro_repo = https://github.com/SIfoxDevTeam/eavro +pkg_eavro_commit = master + +PACKAGES += ecapnp +pkg_ecapnp_name = ecapnp +pkg_ecapnp_description = Cap'n Proto library for Erlang +pkg_ecapnp_homepage = https://github.com/kaos/ecapnp +pkg_ecapnp_fetch = git +pkg_ecapnp_repo = https://github.com/kaos/ecapnp +pkg_ecapnp_commit = master + +PACKAGES += econfig +pkg_econfig_name = econfig +pkg_econfig_description = simple Erlang config handler using INI files +pkg_econfig_homepage = https://github.com/benoitc/econfig +pkg_econfig_fetch = git +pkg_econfig_repo = https://github.com/benoitc/econfig +pkg_econfig_commit = master + +PACKAGES += edate +pkg_edate_name = edate +pkg_edate_description = date manipulation library for erlang +pkg_edate_homepage = 
https://github.com/dweldon/edate +pkg_edate_fetch = git +pkg_edate_repo = https://github.com/dweldon/edate +pkg_edate_commit = master + +PACKAGES += edgar +pkg_edgar_name = edgar +pkg_edgar_description = Erlang Does GNU AR +pkg_edgar_homepage = https://github.com/crownedgrouse/edgar +pkg_edgar_fetch = git +pkg_edgar_repo = https://github.com/crownedgrouse/edgar +pkg_edgar_commit = master + +PACKAGES += edis +pkg_edis_name = edis +pkg_edis_description = An Erlang implementation of Redis KV Store +pkg_edis_homepage = http://inaka.github.com/edis/ +pkg_edis_fetch = git +pkg_edis_repo = https://github.com/inaka/edis +pkg_edis_commit = master + +PACKAGES += edns +pkg_edns_name = edns +pkg_edns_description = Erlang/OTP DNS server +pkg_edns_homepage = https://github.com/hcvst/erlang-dns +pkg_edns_fetch = git +pkg_edns_repo = https://github.com/hcvst/erlang-dns +pkg_edns_commit = master + +PACKAGES += edown +pkg_edown_name = edown +pkg_edown_description = EDoc extension for generating Github-flavored Markdown +pkg_edown_homepage = https://github.com/uwiger/edown +pkg_edown_fetch = git +pkg_edown_repo = https://github.com/uwiger/edown +pkg_edown_commit = master + +PACKAGES += eep +pkg_eep_name = eep +pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy +pkg_eep_homepage = https://github.com/virtan/eep +pkg_eep_fetch = git +pkg_eep_repo = https://github.com/virtan/eep +pkg_eep_commit = master + +PACKAGES += eep_app +pkg_eep_app_name = eep_app +pkg_eep_app_description = Embedded Event Processing +pkg_eep_app_homepage = https://github.com/darach/eep-erl +pkg_eep_app_fetch = git +pkg_eep_app_repo = https://github.com/darach/eep-erl +pkg_eep_app_commit = master + +PACKAGES += efene +pkg_efene_name = efene +pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX +pkg_efene_homepage = https://github.com/efene/efene 
+pkg_efene_fetch = git +pkg_efene_repo = https://github.com/efene/efene +pkg_efene_commit = master + +PACKAGES += eganglia +pkg_eganglia_name = eganglia +pkg_eganglia_description = Erlang library to interact with Ganglia +pkg_eganglia_homepage = https://github.com/inaka/eganglia +pkg_eganglia_fetch = git +pkg_eganglia_repo = https://github.com/inaka/eganglia +pkg_eganglia_commit = v0.9.1 + +PACKAGES += egeoip +pkg_egeoip_name = egeoip +pkg_egeoip_description = Erlang IP Geolocation module, currently supporting the MaxMind GeoLite City Database. +pkg_egeoip_homepage = https://github.com/mochi/egeoip +pkg_egeoip_fetch = git +pkg_egeoip_repo = https://github.com/mochi/egeoip +pkg_egeoip_commit = master + +PACKAGES += ehsa +pkg_ehsa_name = ehsa +pkg_ehsa_description = Erlang HTTP server basic and digest authentication modules +pkg_ehsa_homepage = https://bitbucket.org/a12n/ehsa +pkg_ehsa_fetch = hg +pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa +pkg_ehsa_commit = 2.0.4 + +PACKAGES += ej +pkg_ej_name = ej +pkg_ej_description = Helper module for working with Erlang terms representing JSON +pkg_ej_homepage = https://github.com/seth/ej +pkg_ej_fetch = git +pkg_ej_repo = https://github.com/seth/ej +pkg_ej_commit = master + +PACKAGES += ejabberd +pkg_ejabberd_name = ejabberd +pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform +pkg_ejabberd_homepage = https://github.com/processone/ejabberd +pkg_ejabberd_fetch = git +pkg_ejabberd_repo = https://github.com/processone/ejabberd +pkg_ejabberd_commit = master + +PACKAGES += ejwt +pkg_ejwt_name = ejwt +pkg_ejwt_description = erlang library for JSON Web Token +pkg_ejwt_homepage = https://github.com/artefactop/ejwt +pkg_ejwt_fetch = git +pkg_ejwt_repo = https://github.com/artefactop/ejwt +pkg_ejwt_commit = master + +PACKAGES += ekaf +pkg_ekaf_name = ekaf +pkg_ekaf_description = A minimal, high-performance Kafka client in Erlang. 
+pkg_ekaf_homepage = https://github.com/helpshift/ekaf +pkg_ekaf_fetch = git +pkg_ekaf_repo = https://github.com/helpshift/ekaf +pkg_ekaf_commit = master + +PACKAGES += elarm +pkg_elarm_name = elarm +pkg_elarm_description = Alarm Manager for Erlang. +pkg_elarm_homepage = https://github.com/esl/elarm +pkg_elarm_fetch = git +pkg_elarm_repo = https://github.com/esl/elarm +pkg_elarm_commit = master + +PACKAGES += eleveldb +pkg_eleveldb_name = eleveldb +pkg_eleveldb_description = Erlang LevelDB API +pkg_eleveldb_homepage = https://github.com/basho/eleveldb +pkg_eleveldb_fetch = git +pkg_eleveldb_repo = https://github.com/basho/eleveldb +pkg_eleveldb_commit = master + +PACKAGES += elli +pkg_elli_name = elli +pkg_elli_description = Simple, robust and performant Erlang web server +pkg_elli_homepage = https://github.com/knutin/elli +pkg_elli_fetch = git +pkg_elli_repo = https://github.com/knutin/elli +pkg_elli_commit = master + +PACKAGES += elvis +pkg_elvis_name = elvis +pkg_elvis_description = Erlang Style Reviewer +pkg_elvis_homepage = https://github.com/inaka/elvis +pkg_elvis_fetch = git +pkg_elvis_repo = https://github.com/inaka/elvis +pkg_elvis_commit = 0.2.4 + +PACKAGES += emagick +pkg_emagick_name = emagick +pkg_emagick_description = Wrapper for Graphics/ImageMagick command line tool. +pkg_emagick_homepage = https://github.com/kivra/emagick +pkg_emagick_fetch = git +pkg_emagick_repo = https://github.com/kivra/emagick +pkg_emagick_commit = master + +PACKAGES += emysql +pkg_emysql_name = emysql +pkg_emysql_description = Stable, pure Erlang MySQL driver. 
+pkg_emysql_homepage = https://github.com/Eonblast/Emysql +pkg_emysql_fetch = git +pkg_emysql_repo = https://github.com/Eonblast/Emysql +pkg_emysql_commit = master + +PACKAGES += enm +pkg_enm_name = enm +pkg_enm_description = Erlang driver for nanomsg +pkg_enm_homepage = https://github.com/basho/enm +pkg_enm_fetch = git +pkg_enm_repo = https://github.com/basho/enm +pkg_enm_commit = master + +PACKAGES += entop +pkg_entop_name = entop +pkg_entop_description = A top-like tool for monitoring an Erlang node +pkg_entop_homepage = https://github.com/mazenharake/entop +pkg_entop_fetch = git +pkg_entop_repo = https://github.com/mazenharake/entop +pkg_entop_commit = master + +PACKAGES += epcap +pkg_epcap_name = epcap +pkg_epcap_description = Erlang packet capture interface using pcap +pkg_epcap_homepage = https://github.com/msantos/epcap +pkg_epcap_fetch = git +pkg_epcap_repo = https://github.com/msantos/epcap +pkg_epcap_commit = master + +PACKAGES += eper +pkg_eper_name = eper +pkg_eper_description = Erlang performance and debugging tools. +pkg_eper_homepage = https://github.com/massemanet/eper +pkg_eper_fetch = git +pkg_eper_repo = https://github.com/massemanet/eper +pkg_eper_commit = master + +PACKAGES += epgsql +pkg_epgsql_name = epgsql +pkg_epgsql_description = Erlang PostgreSQL client library. +pkg_epgsql_homepage = https://github.com/epgsql/epgsql +pkg_epgsql_fetch = git +pkg_epgsql_repo = https://github.com/epgsql/epgsql +pkg_epgsql_commit = master + +PACKAGES += episcina +pkg_episcina_name = episcina +pkg_episcina_description = A simple non intrusive resource pool for connections +pkg_episcina_homepage = https://github.com/erlware/episcina +pkg_episcina_fetch = git +pkg_episcina_repo = https://github.com/erlware/episcina +pkg_episcina_commit = master + +PACKAGES += eplot +pkg_eplot_name = eplot +pkg_eplot_description = A plot engine written in erlang. 
+pkg_eplot_homepage = https://github.com/psyeugenic/eplot +pkg_eplot_fetch = git +pkg_eplot_repo = https://github.com/psyeugenic/eplot +pkg_eplot_commit = master + +PACKAGES += epocxy +pkg_epocxy_name = epocxy +pkg_epocxy_description = Erlang Patterns of Concurrency +pkg_epocxy_homepage = https://github.com/duomark/epocxy +pkg_epocxy_fetch = git +pkg_epocxy_repo = https://github.com/duomark/epocxy +pkg_epocxy_commit = master + +PACKAGES += epubnub +pkg_epubnub_name = epubnub +pkg_epubnub_description = Erlang PubNub API +pkg_epubnub_homepage = https://github.com/tsloughter/epubnub +pkg_epubnub_fetch = git +pkg_epubnub_repo = https://github.com/tsloughter/epubnub +pkg_epubnub_commit = master + +PACKAGES += eqm +pkg_eqm_name = eqm +pkg_eqm_description = Erlang pub sub with supply-demand channels +pkg_eqm_homepage = https://github.com/loucash/eqm +pkg_eqm_fetch = git +pkg_eqm_repo = https://github.com/loucash/eqm +pkg_eqm_commit = master + +PACKAGES += eredis +pkg_eredis_name = eredis +pkg_eredis_description = Erlang Redis client +pkg_eredis_homepage = https://github.com/wooga/eredis +pkg_eredis_fetch = git +pkg_eredis_repo = https://github.com/wooga/eredis +pkg_eredis_commit = master + +PACKAGES += eredis_pool +pkg_eredis_pool_name = eredis_pool +pkg_eredis_pool_description = eredis_pool is Pool of Redis clients, using eredis and poolboy. 
+pkg_eredis_pool_homepage = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_fetch = git +pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool +pkg_eredis_pool_commit = master + +PACKAGES += erl_streams +pkg_erl_streams_name = erl_streams +pkg_erl_streams_description = Streams in Erlang +pkg_erl_streams_homepage = https://github.com/epappas/erl_streams +pkg_erl_streams_fetch = git +pkg_erl_streams_repo = https://github.com/epappas/erl_streams +pkg_erl_streams_commit = master + +PACKAGES += erlang_cep +pkg_erlang_cep_name = erlang_cep +pkg_erlang_cep_description = A basic CEP package written in erlang +pkg_erlang_cep_homepage = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_fetch = git +pkg_erlang_cep_repo = https://github.com/danmacklin/erlang_cep +pkg_erlang_cep_commit = master + +PACKAGES += erlang_js +pkg_erlang_js_name = erlang_js +pkg_erlang_js_description = A linked-in driver for Erlang to Mozilla's Spidermonkey Javascript runtime. +pkg_erlang_js_homepage = https://github.com/basho/erlang_js +pkg_erlang_js_fetch = git +pkg_erlang_js_repo = https://github.com/basho/erlang_js +pkg_erlang_js_commit = master + +PACKAGES += erlang_localtime +pkg_erlang_localtime_name = erlang_localtime +pkg_erlang_localtime_description = Erlang library for conversion from one local time to another +pkg_erlang_localtime_homepage = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_fetch = git +pkg_erlang_localtime_repo = https://github.com/dmitryme/erlang_localtime +pkg_erlang_localtime_commit = master + +PACKAGES += erlang_smtp +pkg_erlang_smtp_name = erlang_smtp +pkg_erlang_smtp_description = Erlang SMTP and POP3 server code. 
+pkg_erlang_smtp_homepage = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_fetch = git +pkg_erlang_smtp_repo = https://github.com/tonyg/erlang-smtp +pkg_erlang_smtp_commit = master + +PACKAGES += erlang_term +pkg_erlang_term_name = erlang_term +pkg_erlang_term_description = Erlang Term Info +pkg_erlang_term_homepage = https://github.com/okeuday/erlang_term +pkg_erlang_term_fetch = git +pkg_erlang_term_repo = https://github.com/okeuday/erlang_term +pkg_erlang_term_commit = master + +PACKAGES += erlastic_search +pkg_erlastic_search_name = erlastic_search +pkg_erlastic_search_description = An Erlang app for communicating with Elastic Search's rest interface. +pkg_erlastic_search_homepage = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_fetch = git +pkg_erlastic_search_repo = https://github.com/tsloughter/erlastic_search +pkg_erlastic_search_commit = master + +PACKAGES += erlasticsearch +pkg_erlasticsearch_name = erlasticsearch +pkg_erlasticsearch_description = Erlang thrift interface to elastic_search +pkg_erlasticsearch_homepage = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_fetch = git +pkg_erlasticsearch_repo = https://github.com/dieswaytoofast/erlasticsearch +pkg_erlasticsearch_commit = master + +PACKAGES += erlbrake +pkg_erlbrake_name = erlbrake +pkg_erlbrake_description = Erlang Airbrake notification client +pkg_erlbrake_homepage = https://github.com/kenpratt/erlbrake +pkg_erlbrake_fetch = git +pkg_erlbrake_repo = https://github.com/kenpratt/erlbrake +pkg_erlbrake_commit = master + +PACKAGES += erlcloud +pkg_erlcloud_name = erlcloud +pkg_erlcloud_description = Cloud Computing library for erlang (Amazon EC2, S3, SQS, SimpleDB, Mechanical Turk, ELB) +pkg_erlcloud_homepage = https://github.com/gleber/erlcloud +pkg_erlcloud_fetch = git +pkg_erlcloud_repo = https://github.com/gleber/erlcloud +pkg_erlcloud_commit = master + +PACKAGES += erlcron +pkg_erlcron_name = erlcron +pkg_erlcron_description = Erlang cronish 
system +pkg_erlcron_homepage = https://github.com/erlware/erlcron +pkg_erlcron_fetch = git +pkg_erlcron_repo = https://github.com/erlware/erlcron +pkg_erlcron_commit = master + +PACKAGES += erldb +pkg_erldb_name = erldb +pkg_erldb_description = ORM (Object-relational mapping) application implemented in Erlang +pkg_erldb_homepage = http://erldb.org +pkg_erldb_fetch = git +pkg_erldb_repo = https://github.com/erldb/erldb +pkg_erldb_commit = master + +PACKAGES += erldis +pkg_erldis_name = erldis +pkg_erldis_description = redis erlang client library +pkg_erldis_homepage = https://github.com/cstar/erldis +pkg_erldis_fetch = git +pkg_erldis_repo = https://github.com/cstar/erldis +pkg_erldis_commit = master + +PACKAGES += erldns +pkg_erldns_name = erldns +pkg_erldns_description = DNS server, in erlang. +pkg_erldns_homepage = https://github.com/aetrion/erl-dns +pkg_erldns_fetch = git +pkg_erldns_repo = https://github.com/aetrion/erl-dns +pkg_erldns_commit = master + +PACKAGES += erldocker +pkg_erldocker_name = erldocker +pkg_erldocker_description = Docker Remote API client for Erlang +pkg_erldocker_homepage = https://github.com/proger/erldocker +pkg_erldocker_fetch = git +pkg_erldocker_repo = https://github.com/proger/erldocker +pkg_erldocker_commit = master + +PACKAGES += erlfsmon +pkg_erlfsmon_name = erlfsmon +pkg_erlfsmon_description = Erlang filesystem event watcher for Linux and OSX +pkg_erlfsmon_homepage = https://github.com/proger/erlfsmon +pkg_erlfsmon_fetch = git +pkg_erlfsmon_repo = https://github.com/proger/erlfsmon +pkg_erlfsmon_commit = master + +PACKAGES += erlgit +pkg_erlgit_name = erlgit +pkg_erlgit_description = Erlang convenience wrapper around git executable +pkg_erlgit_homepage = https://github.com/gleber/erlgit +pkg_erlgit_fetch = git +pkg_erlgit_repo = https://github.com/gleber/erlgit +pkg_erlgit_commit = master + +PACKAGES += erlguten +pkg_erlguten_name = erlguten +pkg_erlguten_description = ErlGuten is a system for high-quality typesetting, written 
purely in Erlang. +pkg_erlguten_homepage = https://github.com/richcarl/erlguten +pkg_erlguten_fetch = git +pkg_erlguten_repo = https://github.com/richcarl/erlguten +pkg_erlguten_commit = master + +PACKAGES += erlmc +pkg_erlmc_name = erlmc +pkg_erlmc_description = Erlang memcached binary protocol client +pkg_erlmc_homepage = https://github.com/jkvor/erlmc +pkg_erlmc_fetch = git +pkg_erlmc_repo = https://github.com/jkvor/erlmc +pkg_erlmc_commit = master + +PACKAGES += erlmongo +pkg_erlmongo_name = erlmongo +pkg_erlmongo_description = Record based Erlang driver for MongoDB with gridfs support +pkg_erlmongo_homepage = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_fetch = git +pkg_erlmongo_repo = https://github.com/SergejJurecko/erlmongo +pkg_erlmongo_commit = master + +PACKAGES += erlog +pkg_erlog_name = erlog +pkg_erlog_description = Prolog interpreter in and for Erlang +pkg_erlog_homepage = https://github.com/rvirding/erlog +pkg_erlog_fetch = git +pkg_erlog_repo = https://github.com/rvirding/erlog +pkg_erlog_commit = master + +PACKAGES += erlpass +pkg_erlpass_name = erlpass +pkg_erlpass_description = A library to handle password hashing and changing in a safe manner, independent from any kind of storage whatsoever. 
+pkg_erlpass_homepage = https://github.com/ferd/erlpass +pkg_erlpass_fetch = git +pkg_erlpass_repo = https://github.com/ferd/erlpass +pkg_erlpass_commit = master + +PACKAGES += erlport +pkg_erlport_name = erlport +pkg_erlport_description = ErlPort - connect Erlang to other languages +pkg_erlport_homepage = https://github.com/hdima/erlport +pkg_erlport_fetch = git +pkg_erlport_repo = https://github.com/hdima/erlport +pkg_erlport_commit = master + +PACKAGES += erlsh +pkg_erlsh_name = erlsh +pkg_erlsh_description = Erlang shell tools +pkg_erlsh_homepage = https://github.com/proger/erlsh +pkg_erlsh_fetch = git +pkg_erlsh_repo = https://github.com/proger/erlsh +pkg_erlsh_commit = master + +PACKAGES += erlsha2 +pkg_erlsha2_name = erlsha2 +pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. +pkg_erlsha2_homepage = https://github.com/vinoski/erlsha2 +pkg_erlsha2_fetch = git +pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 +pkg_erlsha2_commit = master + +PACKAGES += erlsom +pkg_erlsom_name = erlsom +pkg_erlsom_description = XML parser for Erlang +pkg_erlsom_homepage = https://github.com/willemdj/erlsom +pkg_erlsom_fetch = git +pkg_erlsom_repo = https://github.com/willemdj/erlsom +pkg_erlsom_commit = master + +PACKAGES += erlubi +pkg_erlubi_name = erlubi +pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) +pkg_erlubi_homepage = https://github.com/krestenkrab/erlubi +pkg_erlubi_fetch = git +pkg_erlubi_repo = https://github.com/krestenkrab/erlubi +pkg_erlubi_commit = master + +PACKAGES += erlvolt +pkg_erlvolt_name = erlvolt +pkg_erlvolt_description = VoltDB Erlang Client Driver +pkg_erlvolt_homepage = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_fetch = git +pkg_erlvolt_repo = https://github.com/VoltDB/voltdb-client-erlang +pkg_erlvolt_commit = master + +PACKAGES += erlware_commons +pkg_erlware_commons_name = erlware_commons +pkg_erlware_commons_description = Erlware Commons is an Erlware project 
focused on all aspects of reusable Erlang components. +pkg_erlware_commons_homepage = https://github.com/erlware/erlware_commons +pkg_erlware_commons_fetch = git +pkg_erlware_commons_repo = https://github.com/erlware/erlware_commons +pkg_erlware_commons_commit = master + +PACKAGES += erlydtl +pkg_erlydtl_name = erlydtl +pkg_erlydtl_description = Django Template Language for Erlang. +pkg_erlydtl_homepage = https://github.com/erlydtl/erlydtl +pkg_erlydtl_fetch = git +pkg_erlydtl_repo = https://github.com/erlydtl/erlydtl +pkg_erlydtl_commit = master + +PACKAGES += errd +pkg_errd_name = errd +pkg_errd_description = Erlang RRDTool library +pkg_errd_homepage = https://github.com/archaelus/errd +pkg_errd_fetch = git +pkg_errd_repo = https://github.com/archaelus/errd +pkg_errd_commit = master + +PACKAGES += erserve +pkg_erserve_name = erserve +pkg_erserve_description = Erlang/Rserve communication interface +pkg_erserve_homepage = https://github.com/del/erserve +pkg_erserve_fetch = git +pkg_erserve_repo = https://github.com/del/erserve +pkg_erserve_commit = master + +PACKAGES += erwa +pkg_erwa_name = erwa +pkg_erwa_description = A WAMP router and client written in Erlang. 
+pkg_erwa_homepage = https://github.com/bwegh/erwa +pkg_erwa_fetch = git +pkg_erwa_repo = https://github.com/bwegh/erwa +pkg_erwa_commit = 0.1.1 + +PACKAGES += espec +pkg_espec_name = espec +pkg_espec_description = ESpec: Behaviour driven development framework for Erlang +pkg_espec_homepage = https://github.com/lucaspiller/espec +pkg_espec_fetch = git +pkg_espec_repo = https://github.com/lucaspiller/espec +pkg_espec_commit = master + +PACKAGES += estatsd +pkg_estatsd_name = estatsd +pkg_estatsd_description = Erlang stats aggregation app that periodically flushes data to graphite +pkg_estatsd_homepage = https://github.com/RJ/estatsd +pkg_estatsd_fetch = git +pkg_estatsd_repo = https://github.com/RJ/estatsd +pkg_estatsd_commit = master + +PACKAGES += etap +pkg_etap_name = etap +pkg_etap_description = etap is a simple erlang testing library that provides TAP compliant output. +pkg_etap_homepage = https://github.com/ngerakines/etap +pkg_etap_fetch = git +pkg_etap_repo = https://github.com/ngerakines/etap +pkg_etap_commit = master + +PACKAGES += etest +pkg_etest_name = etest +pkg_etest_description = A lightweight, convention over configuration test framework for Erlang +pkg_etest_homepage = https://github.com/wooga/etest +pkg_etest_fetch = git +pkg_etest_repo = https://github.com/wooga/etest +pkg_etest_commit = master + +PACKAGES += etest_http +pkg_etest_http_name = etest_http +pkg_etest_http_description = etest Assertions around HTTP (client-side) +pkg_etest_http_homepage = https://github.com/wooga/etest_http +pkg_etest_http_fetch = git +pkg_etest_http_repo = https://github.com/wooga/etest_http +pkg_etest_http_commit = master + +PACKAGES += etoml +pkg_etoml_name = etoml +pkg_etoml_description = TOML language erlang parser +pkg_etoml_homepage = https://github.com/kalta/etoml +pkg_etoml_fetch = git +pkg_etoml_repo = https://github.com/kalta/etoml +pkg_etoml_commit = master + +PACKAGES += eunit +pkg_eunit_name = eunit +pkg_eunit_description = The EUnit lightweight unit 
testing framework for Erlang - this is the canonical development repository. +pkg_eunit_homepage = https://github.com/richcarl/eunit +pkg_eunit_fetch = git +pkg_eunit_repo = https://github.com/richcarl/eunit +pkg_eunit_commit = master + +PACKAGES += eunit_formatters +pkg_eunit_formatters_name = eunit_formatters +pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. +pkg_eunit_formatters_homepage = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_fetch = git +pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters +pkg_eunit_formatters_commit = master + +PACKAGES += euthanasia +pkg_euthanasia_name = euthanasia +pkg_euthanasia_description = Merciful killer for your Erlang processes +pkg_euthanasia_homepage = https://github.com/doubleyou/euthanasia +pkg_euthanasia_fetch = git +pkg_euthanasia_repo = https://github.com/doubleyou/euthanasia +pkg_euthanasia_commit = master + +PACKAGES += evum +pkg_evum_name = evum +pkg_evum_description = Spawn Linux VMs as Erlang processes in the Erlang VM +pkg_evum_homepage = https://github.com/msantos/evum +pkg_evum_fetch = git +pkg_evum_repo = https://github.com/msantos/evum +pkg_evum_commit = master + +PACKAGES += exec +pkg_exec_name = exec +pkg_exec_description = Execute and control OS processes from Erlang/OTP. 
+pkg_exec_homepage = http://saleyn.github.com/erlexec +pkg_exec_fetch = git +pkg_exec_repo = https://github.com/saleyn/erlexec +pkg_exec_commit = master + +PACKAGES += exml +pkg_exml_name = exml +pkg_exml_description = XML parsing library in Erlang +pkg_exml_homepage = https://github.com/paulgray/exml +pkg_exml_fetch = git +pkg_exml_repo = https://github.com/paulgray/exml +pkg_exml_commit = master + +PACKAGES += exometer +pkg_exometer_name = exometer +pkg_exometer_description = Basic measurement objects and probe behavior +pkg_exometer_homepage = https://github.com/Feuerlabs/exometer +pkg_exometer_fetch = git +pkg_exometer_repo = https://github.com/Feuerlabs/exometer +pkg_exometer_commit = 1.2 + +PACKAGES += exs1024 +pkg_exs1024_name = exs1024 +pkg_exs1024_description = Xorshift1024star pseudo random number generator for Erlang. +pkg_exs1024_homepage = https://github.com/jj1bdx/exs1024 +pkg_exs1024_fetch = git +pkg_exs1024_repo = https://github.com/jj1bdx/exs1024 +pkg_exs1024_commit = master + +PACKAGES += exs64 +pkg_exs64_name = exs64 +pkg_exs64_description = Xorshift64star pseudo random number generator for Erlang. +pkg_exs64_homepage = https://github.com/jj1bdx/exs64 +pkg_exs64_fetch = git +pkg_exs64_repo = https://github.com/jj1bdx/exs64 +pkg_exs64_commit = master + +PACKAGES += exsplus116 +pkg_exsplus116_name = exsplus116 +pkg_exsplus116_description = Xorshift116plus for Erlang +pkg_exsplus116_homepage = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_fetch = git +pkg_exsplus116_repo = https://github.com/jj1bdx/exsplus116 +pkg_exsplus116_commit = master + +PACKAGES += exsplus128 +pkg_exsplus128_name = exsplus128 +pkg_exsplus128_description = Xorshift128plus pseudo random number generator for Erlang. 
+pkg_exsplus128_homepage = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_fetch = git +pkg_exsplus128_repo = https://github.com/jj1bdx/exsplus128 +pkg_exsplus128_commit = master + +PACKAGES += ezmq +pkg_ezmq_name = ezmq +pkg_ezmq_description = zMQ implemented in Erlang +pkg_ezmq_homepage = https://github.com/RoadRunnr/ezmq +pkg_ezmq_fetch = git +pkg_ezmq_repo = https://github.com/RoadRunnr/ezmq +pkg_ezmq_commit = master + +PACKAGES += ezmtp +pkg_ezmtp_name = ezmtp +pkg_ezmtp_description = ZMTP protocol in pure Erlang. +pkg_ezmtp_homepage = https://github.com/a13x/ezmtp +pkg_ezmtp_fetch = git +pkg_ezmtp_repo = https://github.com/a13x/ezmtp +pkg_ezmtp_commit = master + +PACKAGES += fast_disk_log +pkg_fast_disk_log_name = fast_disk_log +pkg_fast_disk_log_description = Pool-based asynchronous Erlang disk logger +pkg_fast_disk_log_homepage = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_fetch = git +pkg_fast_disk_log_repo = https://github.com/lpgauth/fast_disk_log +pkg_fast_disk_log_commit = master + +PACKAGES += feeder +pkg_feeder_name = feeder +pkg_feeder_description = Stream parse RSS and Atom formatted XML feeds. +pkg_feeder_homepage = https://github.com/michaelnisi/feeder +pkg_feeder_fetch = git +pkg_feeder_repo = https://github.com/michaelnisi/feeder +pkg_feeder_commit = v1.4.6 + +PACKAGES += fix +pkg_fix_name = fix +pkg_fix_description = http://fixprotocol.org/ implementation. 
+pkg_fix_homepage = https://github.com/maxlapshin/fix +pkg_fix_fetch = git +pkg_fix_repo = https://github.com/maxlapshin/fix +pkg_fix_commit = master + +PACKAGES += flower +pkg_flower_name = flower +pkg_flower_description = FlowER - a Erlang OpenFlow development platform +pkg_flower_homepage = https://github.com/travelping/flower +pkg_flower_fetch = git +pkg_flower_repo = https://github.com/travelping/flower +pkg_flower_commit = master + +PACKAGES += fn +pkg_fn_name = fn +pkg_fn_description = Function utilities for Erlang +pkg_fn_homepage = https://github.com/reiddraper/fn +pkg_fn_fetch = git +pkg_fn_repo = https://github.com/reiddraper/fn +pkg_fn_commit = master + +PACKAGES += folsom +pkg_folsom_name = folsom +pkg_folsom_description = Expose Erlang Events and Metrics +pkg_folsom_homepage = https://github.com/boundary/folsom +pkg_folsom_fetch = git +pkg_folsom_repo = https://github.com/boundary/folsom +pkg_folsom_commit = master + +PACKAGES += folsom_cowboy +pkg_folsom_cowboy_name = folsom_cowboy +pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. 
+pkg_folsom_cowboy_homepage = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_fetch = git +pkg_folsom_cowboy_repo = https://github.com/boundary/folsom_cowboy +pkg_folsom_cowboy_commit = master + +PACKAGES += folsomite +pkg_folsomite_name = folsomite +pkg_folsomite_description = blow up your graphite / riemann server with folsom metrics +pkg_folsomite_homepage = https://github.com/campanja/folsomite +pkg_folsomite_fetch = git +pkg_folsomite_repo = https://github.com/campanja/folsomite +pkg_folsomite_commit = master + +PACKAGES += fs +pkg_fs_name = fs +pkg_fs_description = Erlang FileSystem Listener +pkg_fs_homepage = https://github.com/synrc/fs +pkg_fs_fetch = git +pkg_fs_repo = https://github.com/synrc/fs +pkg_fs_commit = master + +PACKAGES += fuse +pkg_fuse_name = fuse +pkg_fuse_description = A Circuit Breaker for Erlang +pkg_fuse_homepage = https://github.com/jlouis/fuse +pkg_fuse_fetch = git +pkg_fuse_repo = https://github.com/jlouis/fuse +pkg_fuse_commit = master + +PACKAGES += gcm +pkg_gcm_name = gcm +pkg_gcm_description = An Erlang application for Google Cloud Messaging +pkg_gcm_homepage = https://github.com/pdincau/gcm-erlang +pkg_gcm_fetch = git +pkg_gcm_repo = https://github.com/pdincau/gcm-erlang +pkg_gcm_commit = master + +PACKAGES += gcprof +pkg_gcprof_name = gcprof +pkg_gcprof_description = Garbage Collection profiler for Erlang +pkg_gcprof_homepage = https://github.com/knutin/gcprof +pkg_gcprof_fetch = git +pkg_gcprof_repo = https://github.com/knutin/gcprof +pkg_gcprof_commit = master + +PACKAGES += geas +pkg_geas_name = geas +pkg_geas_description = Guess Erlang Application Scattering +pkg_geas_homepage = https://github.com/crownedgrouse/geas +pkg_geas_fetch = git +pkg_geas_repo = https://github.com/crownedgrouse/geas +pkg_geas_commit = master + +PACKAGES += geef +pkg_geef_name = geef +pkg_geef_description = Git NEEEEF (Erlang NIF) +pkg_geef_homepage = https://github.com/carlosmn/geef +pkg_geef_fetch = git +pkg_geef_repo = 
https://github.com/carlosmn/geef +pkg_geef_commit = master + +PACKAGES += gen_cycle +pkg_gen_cycle_name = gen_cycle +pkg_gen_cycle_description = Simple, generic OTP behaviour for recurring tasks +pkg_gen_cycle_homepage = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_fetch = git +pkg_gen_cycle_repo = https://github.com/aerosol/gen_cycle +pkg_gen_cycle_commit = develop + +PACKAGES += gen_icmp +pkg_gen_icmp_name = gen_icmp +pkg_gen_icmp_description = Erlang interface to ICMP sockets +pkg_gen_icmp_homepage = https://github.com/msantos/gen_icmp +pkg_gen_icmp_fetch = git +pkg_gen_icmp_repo = https://github.com/msantos/gen_icmp +pkg_gen_icmp_commit = master + +PACKAGES += gen_nb_server +pkg_gen_nb_server_name = gen_nb_server +pkg_gen_nb_server_description = OTP behavior for writing non-blocking servers +pkg_gen_nb_server_homepage = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_fetch = git +pkg_gen_nb_server_repo = https://github.com/kevsmith/gen_nb_server +pkg_gen_nb_server_commit = master + +PACKAGES += gen_paxos +pkg_gen_paxos_name = gen_paxos +pkg_gen_paxos_description = An Erlang/OTP-style implementation of the PAXOS distributed consensus protocol +pkg_gen_paxos_homepage = https://github.com/gburd/gen_paxos +pkg_gen_paxos_fetch = git +pkg_gen_paxos_repo = https://github.com/gburd/gen_paxos +pkg_gen_paxos_commit = master + +PACKAGES += gen_smtp +pkg_gen_smtp_name = gen_smtp +pkg_gen_smtp_description = A generic Erlang SMTP server and client that can be extended via callback modules +pkg_gen_smtp_homepage = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_fetch = git +pkg_gen_smtp_repo = https://github.com/Vagabond/gen_smtp +pkg_gen_smtp_commit = master + +PACKAGES += gen_tracker +pkg_gen_tracker_name = gen_tracker +pkg_gen_tracker_description = supervisor with ets handling of children and their metadata +pkg_gen_tracker_homepage = https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_fetch = git +pkg_gen_tracker_repo = 
https://github.com/erlyvideo/gen_tracker +pkg_gen_tracker_commit = master + +PACKAGES += gen_unix +pkg_gen_unix_name = gen_unix +pkg_gen_unix_description = Erlang Unix socket interface +pkg_gen_unix_homepage = https://github.com/msantos/gen_unix +pkg_gen_unix_fetch = git +pkg_gen_unix_repo = https://github.com/msantos/gen_unix +pkg_gen_unix_commit = master + +PACKAGES += getopt +pkg_getopt_name = getopt +pkg_getopt_description = Module to parse command line arguments using the GNU getopt syntax +pkg_getopt_homepage = https://github.com/jcomellas/getopt +pkg_getopt_fetch = git +pkg_getopt_repo = https://github.com/jcomellas/getopt +pkg_getopt_commit = master + +PACKAGES += gettext +pkg_gettext_name = gettext +pkg_gettext_description = Erlang internationalization library. +pkg_gettext_homepage = https://github.com/etnt/gettext +pkg_gettext_fetch = git +pkg_gettext_repo = https://github.com/etnt/gettext +pkg_gettext_commit = master + +PACKAGES += giallo +pkg_giallo_name = giallo +pkg_giallo_description = Small and flexible web framework on top of Cowboy +pkg_giallo_homepage = https://github.com/kivra/giallo +pkg_giallo_fetch = git +pkg_giallo_repo = https://github.com/kivra/giallo +pkg_giallo_commit = master + +PACKAGES += gin +pkg_gin_name = gin +pkg_gin_description = The guards and for Erlang parse_transform +pkg_gin_homepage = https://github.com/mad-cocktail/gin +pkg_gin_fetch = git +pkg_gin_repo = https://github.com/mad-cocktail/gin +pkg_gin_commit = master + +PACKAGES += gitty +pkg_gitty_name = gitty +pkg_gitty_description = Git access in erlang +pkg_gitty_homepage = https://github.com/maxlapshin/gitty +pkg_gitty_fetch = git +pkg_gitty_repo = https://github.com/maxlapshin/gitty +pkg_gitty_commit = master + +PACKAGES += gold_fever +pkg_gold_fever_name = gold_fever +pkg_gold_fever_description = A Treasure Hunt for Erlangers +pkg_gold_fever_homepage = https://github.com/inaka/gold_fever +pkg_gold_fever_fetch = git +pkg_gold_fever_repo = 
https://github.com/inaka/gold_fever +pkg_gold_fever_commit = master + +PACKAGES += gossiperl +pkg_gossiperl_name = gossiperl +pkg_gossiperl_description = Gossip middleware in Erlang +pkg_gossiperl_homepage = http://gossiperl.com/ +pkg_gossiperl_fetch = git +pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl +pkg_gossiperl_commit = master + +PACKAGES += gpb +pkg_gpb_name = gpb +pkg_gpb_description = A Google Protobuf implementation for Erlang +pkg_gpb_homepage = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_fetch = git +pkg_gpb_repo = https://github.com/tomas-abrahamsson/gpb +pkg_gpb_commit = master + +PACKAGES += gproc +pkg_gproc_name = gproc +pkg_gproc_description = Extended process registry for Erlang +pkg_gproc_homepage = https://github.com/uwiger/gproc +pkg_gproc_fetch = git +pkg_gproc_repo = https://github.com/uwiger/gproc +pkg_gproc_commit = master + +PACKAGES += grapherl +pkg_grapherl_name = grapherl +pkg_grapherl_description = Create graphs of Erlang systems and programs +pkg_grapherl_homepage = https://github.com/eproxus/grapherl +pkg_grapherl_fetch = git +pkg_grapherl_repo = https://github.com/eproxus/grapherl +pkg_grapherl_commit = master + +PACKAGES += gun +pkg_gun_name = gun +pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. +pkg_gun_homepage = http://ninenines.eu +pkg_gun_fetch = git +pkg_gun_repo = https://github.com/ninenines/gun +pkg_gun_commit = master + +PACKAGES += gut +pkg_gut_name = gut +pkg_gut_description = gut is a template printing, aka scaffolding, tool for Erlang. 
Like rails generate or yeoman +pkg_gut_homepage = https://github.com/unbalancedparentheses/gut +pkg_gut_fetch = git +pkg_gut_repo = https://github.com/unbalancedparentheses/gut +pkg_gut_commit = master + +PACKAGES += hackney +pkg_hackney_name = hackney +pkg_hackney_description = simple HTTP client in Erlang +pkg_hackney_homepage = https://github.com/benoitc/hackney +pkg_hackney_fetch = git +pkg_hackney_repo = https://github.com/benoitc/hackney +pkg_hackney_commit = master + +PACKAGES += hamcrest +pkg_hamcrest_name = hamcrest +pkg_hamcrest_description = Erlang port of Hamcrest +pkg_hamcrest_homepage = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_fetch = git +pkg_hamcrest_repo = https://github.com/hyperthunk/hamcrest-erlang +pkg_hamcrest_commit = master + +PACKAGES += hanoidb +pkg_hanoidb_name = hanoidb +pkg_hanoidb_description = Erlang LSM BTree Storage +pkg_hanoidb_homepage = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_fetch = git +pkg_hanoidb_repo = https://github.com/krestenkrab/hanoidb +pkg_hanoidb_commit = master + +PACKAGES += hottub +pkg_hottub_name = hottub +pkg_hottub_description = Permanent Erlang Worker Pool +pkg_hottub_homepage = https://github.com/bfrog/hottub +pkg_hottub_fetch = git +pkg_hottub_repo = https://github.com/bfrog/hottub +pkg_hottub_commit = master + +PACKAGES += hpack +pkg_hpack_name = hpack +pkg_hpack_description = HPACK Implementation for Erlang +pkg_hpack_homepage = https://github.com/joedevivo/hpack +pkg_hpack_fetch = git +pkg_hpack_repo = https://github.com/joedevivo/hpack +pkg_hpack_commit = master + +PACKAGES += hyper +pkg_hyper_name = hyper +pkg_hyper_description = Erlang implementation of HyperLogLog +pkg_hyper_homepage = https://github.com/GameAnalytics/hyper +pkg_hyper_fetch = git +pkg_hyper_repo = https://github.com/GameAnalytics/hyper +pkg_hyper_commit = master + +PACKAGES += ibrowse +pkg_ibrowse_name = ibrowse +pkg_ibrowse_description = Erlang HTTP client +pkg_ibrowse_homepage = 
https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_fetch = git +pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse +pkg_ibrowse_commit = v4.1.1 + +PACKAGES += ierlang +pkg_ierlang_name = ierlang +pkg_ierlang_description = An Erlang language kernel for IPython. +pkg_ierlang_homepage = https://github.com/robbielynch/ierlang +pkg_ierlang_fetch = git +pkg_ierlang_repo = https://github.com/robbielynch/ierlang +pkg_ierlang_commit = master + +PACKAGES += iota +pkg_iota_name = iota +pkg_iota_description = iota (Inter-dependency Objective Testing Apparatus) - a tool to enforce clean separation of responsibilities in Erlang code +pkg_iota_homepage = https://github.com/jpgneves/iota +pkg_iota_fetch = git +pkg_iota_repo = https://github.com/jpgneves/iota +pkg_iota_commit = master + +PACKAGES += irc_lib +pkg_irc_lib_name = irc_lib +pkg_irc_lib_description = Erlang irc client library +pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_fetch = git +pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib +pkg_irc_lib_commit = master + +PACKAGES += ircd +pkg_ircd_name = ircd +pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. 
+pkg_ircd_homepage = https://github.com/tonyg/erlang-ircd +pkg_ircd_fetch = git +pkg_ircd_repo = https://github.com/tonyg/erlang-ircd +pkg_ircd_commit = master + +PACKAGES += iris +pkg_iris_name = iris +pkg_iris_description = Iris Erlang binding +pkg_iris_homepage = https://github.com/project-iris/iris-erl +pkg_iris_fetch = git +pkg_iris_repo = https://github.com/project-iris/iris-erl +pkg_iris_commit = master + +PACKAGES += iso8601 +pkg_iso8601_name = iso8601 +pkg_iso8601_description = Erlang ISO 8601 date formatter/parser +pkg_iso8601_homepage = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_fetch = git +pkg_iso8601_repo = https://github.com/seansawyer/erlang_iso8601 +pkg_iso8601_commit = master + +PACKAGES += jamdb_sybase +pkg_jamdb_sybase_name = jamdb_sybase +pkg_jamdb_sybase_description = Erlang driver for SAP Sybase ASE +pkg_jamdb_sybase_homepage = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_fetch = git +pkg_jamdb_sybase_repo = https://github.com/erlangbureau/jamdb_sybase +pkg_jamdb_sybase_commit = 0.6.0 + +PACKAGES += jerg +pkg_jerg_name = jerg +pkg_jerg_description = JSON Schema to Erlang Records Generator +pkg_jerg_homepage = https://github.com/ddossot/jerg +pkg_jerg_fetch = git +pkg_jerg_repo = https://github.com/ddossot/jerg +pkg_jerg_commit = master + +PACKAGES += jesse +pkg_jesse_name = jesse +pkg_jesse_description = jesse (JSon Schema Erlang) is an implementation of a json schema validator for Erlang. +pkg_jesse_homepage = https://github.com/klarna/jesse +pkg_jesse_fetch = git +pkg_jesse_repo = https://github.com/klarna/jesse +pkg_jesse_commit = master + +PACKAGES += jiffy +pkg_jiffy_name = jiffy +pkg_jiffy_description = JSON NIFs for Erlang. 
+pkg_jiffy_homepage = https://github.com/davisp/jiffy +pkg_jiffy_fetch = git +pkg_jiffy_repo = https://github.com/davisp/jiffy +pkg_jiffy_commit = master + +PACKAGES += jiffy_v +pkg_jiffy_v_name = jiffy_v +pkg_jiffy_v_description = JSON validation utility +pkg_jiffy_v_homepage = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_fetch = git +pkg_jiffy_v_repo = https://github.com/shizzard/jiffy-v +pkg_jiffy_v_commit = 0.3.3 + +PACKAGES += jobs +pkg_jobs_name = jobs +pkg_jobs_description = a Job scheduler for load regulation +pkg_jobs_homepage = https://github.com/esl/jobs +pkg_jobs_fetch = git +pkg_jobs_repo = https://github.com/esl/jobs +pkg_jobs_commit = 0.3 + +PACKAGES += joxa +pkg_joxa_name = joxa +pkg_joxa_description = A Modern Lisp for the Erlang VM +pkg_joxa_homepage = https://github.com/joxa/joxa +pkg_joxa_fetch = git +pkg_joxa_repo = https://github.com/joxa/joxa +pkg_joxa_commit = master + +PACKAGES += json +pkg_json_name = json +pkg_json_description = a high level json library for erlang (17.0+) +pkg_json_homepage = https://github.com/talentdeficit/json +pkg_json_fetch = git +pkg_json_repo = https://github.com/talentdeficit/json +pkg_json_commit = master + +PACKAGES += json_rec +pkg_json_rec_name = json_rec +pkg_json_rec_description = JSON to erlang record +pkg_json_rec_homepage = https://github.com/justinkirby/json_rec +pkg_json_rec_fetch = git +pkg_json_rec_repo = https://github.com/justinkirby/json_rec +pkg_json_rec_commit = master + +PACKAGES += jsonerl +pkg_jsonerl_name = jsonerl +pkg_jsonerl_description = yet another but slightly different erlang <-> json encoder/decoder +pkg_jsonerl_homepage = https://github.com/lambder/jsonerl +pkg_jsonerl_fetch = git +pkg_jsonerl_repo = https://github.com/lambder/jsonerl +pkg_jsonerl_commit = master + +PACKAGES += jsonpath +pkg_jsonpath_name = jsonpath +pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation +pkg_jsonpath_homepage = https://github.com/GeneStevens/jsonpath 
+pkg_jsonpath_fetch = git +pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath +pkg_jsonpath_commit = master + +PACKAGES += jsonx +pkg_jsonx_name = jsonx +pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. +pkg_jsonx_homepage = https://github.com/iskra/jsonx +pkg_jsonx_fetch = git +pkg_jsonx_repo = https://github.com/iskra/jsonx +pkg_jsonx_commit = master + +PACKAGES += jsx +pkg_jsx_name = jsx +pkg_jsx_description = An Erlang application for consuming, producing and manipulating JSON. +pkg_jsx_homepage = https://github.com/talentdeficit/jsx +pkg_jsx_fetch = git +pkg_jsx_repo = https://github.com/talentdeficit/jsx +pkg_jsx_commit = master + +PACKAGES += kafka +pkg_kafka_name = kafka +pkg_kafka_description = Kafka consumer and producer in Erlang +pkg_kafka_homepage = https://github.com/wooga/kafka-erlang +pkg_kafka_fetch = git +pkg_kafka_repo = https://github.com/wooga/kafka-erlang +pkg_kafka_commit = master + +PACKAGES += kai +pkg_kai_name = kai +pkg_kai_description = DHT storage by Takeshi Inoue +pkg_kai_homepage = https://github.com/synrc/kai +pkg_kai_fetch = git +pkg_kai_repo = https://github.com/synrc/kai +pkg_kai_commit = master + +PACKAGES += katja +pkg_katja_name = katja +pkg_katja_description = A simple Riemann client written in Erlang. 
+pkg_katja_homepage = https://github.com/nifoc/katja +pkg_katja_fetch = git +pkg_katja_repo = https://github.com/nifoc/katja +pkg_katja_commit = master + +PACKAGES += kdht +pkg_kdht_name = kdht +pkg_kdht_description = kdht is an erlang DHT implementation +pkg_kdht_homepage = https://github.com/kevinlynx/kdht +pkg_kdht_fetch = git +pkg_kdht_repo = https://github.com/kevinlynx/kdht +pkg_kdht_commit = master + +PACKAGES += key2value +pkg_key2value_name = key2value +pkg_key2value_description = Erlang 2-way map +pkg_key2value_homepage = https://github.com/okeuday/key2value +pkg_key2value_fetch = git +pkg_key2value_repo = https://github.com/okeuday/key2value +pkg_key2value_commit = master + +PACKAGES += keys1value +pkg_keys1value_name = keys1value +pkg_keys1value_description = Erlang set associative map for key lists +pkg_keys1value_homepage = https://github.com/okeuday/keys1value +pkg_keys1value_fetch = git +pkg_keys1value_repo = https://github.com/okeuday/keys1value +pkg_keys1value_commit = master + +PACKAGES += kinetic +pkg_kinetic_name = kinetic +pkg_kinetic_description = Erlang Kinesis Client +pkg_kinetic_homepage = https://github.com/AdRoll/kinetic +pkg_kinetic_fetch = git +pkg_kinetic_repo = https://github.com/AdRoll/kinetic +pkg_kinetic_commit = master + +PACKAGES += kjell +pkg_kjell_name = kjell +pkg_kjell_description = Erlang Shell +pkg_kjell_homepage = https://github.com/karlll/kjell +pkg_kjell_fetch = git +pkg_kjell_repo = https://github.com/karlll/kjell +pkg_kjell_commit = master + +PACKAGES += kraken +pkg_kraken_name = kraken +pkg_kraken_description = Distributed Pubsub Server for Realtime Apps +pkg_kraken_homepage = https://github.com/Asana/kraken +pkg_kraken_fetch = git +pkg_kraken_repo = https://github.com/Asana/kraken +pkg_kraken_commit = master + +PACKAGES += kucumberl +pkg_kucumberl_name = kucumberl +pkg_kucumberl_description = A pure-erlang, open-source, implementation of Cucumber +pkg_kucumberl_homepage = https://github.com/openshine/kucumberl 
+pkg_kucumberl_fetch = git +pkg_kucumberl_repo = https://github.com/openshine/kucumberl +pkg_kucumberl_commit = master + +PACKAGES += kvc +pkg_kvc_name = kvc +pkg_kvc_description = KVC - Key Value Coding for Erlang data structures +pkg_kvc_homepage = https://github.com/etrepum/kvc +pkg_kvc_fetch = git +pkg_kvc_repo = https://github.com/etrepum/kvc +pkg_kvc_commit = master + +PACKAGES += kvlists +pkg_kvlists_name = kvlists +pkg_kvlists_description = Lists of key-value pairs (decoded JSON) in Erlang +pkg_kvlists_homepage = https://github.com/jcomellas/kvlists +pkg_kvlists_fetch = git +pkg_kvlists_repo = https://github.com/jcomellas/kvlists +pkg_kvlists_commit = master + +PACKAGES += kvs +pkg_kvs_name = kvs +pkg_kvs_description = Container and Iterator +pkg_kvs_homepage = https://github.com/synrc/kvs +pkg_kvs_fetch = git +pkg_kvs_repo = https://github.com/synrc/kvs +pkg_kvs_commit = master + +PACKAGES += lager +pkg_lager_name = lager +pkg_lager_description = A logging framework for Erlang/OTP. +pkg_lager_homepage = https://github.com/basho/lager +pkg_lager_fetch = git +pkg_lager_repo = https://github.com/basho/lager +pkg_lager_commit = master + +PACKAGES += lager_amqp_backend +pkg_lager_amqp_backend_name = lager_amqp_backend +pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend +pkg_lager_amqp_backend_homepage = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_fetch = git +pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend +pkg_lager_amqp_backend_commit = master + +PACKAGES += lager_syslog +pkg_lager_syslog_name = lager_syslog +pkg_lager_syslog_description = Syslog backend for lager +pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog +pkg_lager_syslog_fetch = git +pkg_lager_syslog_repo = https://github.com/basho/lager_syslog +pkg_lager_syslog_commit = master + +PACKAGES += lambdapad +pkg_lambdapad_name = lambdapad +pkg_lambdapad_description = Static site generator using Erlang. Yes, Erlang. 
+pkg_lambdapad_homepage = https://github.com/gar1t/lambdapad +pkg_lambdapad_fetch = git +pkg_lambdapad_repo = https://github.com/gar1t/lambdapad +pkg_lambdapad_commit = master + +PACKAGES += lasp +pkg_lasp_name = lasp +pkg_lasp_description = A Language for Distributed, Eventually Consistent Computations +pkg_lasp_homepage = http://lasp-lang.org/ +pkg_lasp_fetch = git +pkg_lasp_repo = https://github.com/lasp-lang/lasp +pkg_lasp_commit = master + +PACKAGES += lasse +pkg_lasse_name = lasse +pkg_lasse_description = SSE handler for Cowboy +pkg_lasse_homepage = https://github.com/inaka/lasse +pkg_lasse_fetch = git +pkg_lasse_repo = https://github.com/inaka/lasse +pkg_lasse_commit = 0.1.0 + +PACKAGES += ldap +pkg_ldap_name = ldap +pkg_ldap_description = LDAP server written in Erlang +pkg_ldap_homepage = https://github.com/spawnproc/ldap +pkg_ldap_fetch = git +pkg_ldap_repo = https://github.com/spawnproc/ldap +pkg_ldap_commit = master + +PACKAGES += lethink +pkg_lethink_name = lethink +pkg_lethink_description = erlang driver for rethinkdb +pkg_lethink_homepage = https://github.com/taybin/lethink +pkg_lethink_fetch = git +pkg_lethink_repo = https://github.com/taybin/lethink +pkg_lethink_commit = master + +PACKAGES += lfe +pkg_lfe_name = lfe +pkg_lfe_description = Lisp Flavoured Erlang (LFE) +pkg_lfe_homepage = https://github.com/rvirding/lfe +pkg_lfe_fetch = git +pkg_lfe_repo = https://github.com/rvirding/lfe +pkg_lfe_commit = master + +PACKAGES += ling +pkg_ling_name = ling +pkg_ling_description = Erlang on Xen +pkg_ling_homepage = https://github.com/cloudozer/ling +pkg_ling_fetch = git +pkg_ling_repo = https://github.com/cloudozer/ling +pkg_ling_commit = master + +PACKAGES += live +pkg_live_name = live +pkg_live_description = Automated module and configuration reloader. 
+pkg_live_homepage = http://ninenines.eu +pkg_live_fetch = git +pkg_live_repo = https://github.com/ninenines/live +pkg_live_commit = master + +PACKAGES += lmq +pkg_lmq_name = lmq +pkg_lmq_description = Lightweight Message Queue +pkg_lmq_homepage = https://github.com/iij/lmq +pkg_lmq_fetch = git +pkg_lmq_repo = https://github.com/iij/lmq +pkg_lmq_commit = master + +PACKAGES += locker +pkg_locker_name = locker +pkg_locker_description = Atomic distributed 'check and set' for short-lived keys +pkg_locker_homepage = https://github.com/wooga/locker +pkg_locker_fetch = git +pkg_locker_repo = https://github.com/wooga/locker +pkg_locker_commit = master + +PACKAGES += locks +pkg_locks_name = locks +pkg_locks_description = A scalable, deadlock-resolving resource locker +pkg_locks_homepage = https://github.com/uwiger/locks +pkg_locks_fetch = git +pkg_locks_repo = https://github.com/uwiger/locks +pkg_locks_commit = master + +PACKAGES += log4erl +pkg_log4erl_name = log4erl +pkg_log4erl_description = A logger for erlang in the spirit of Log4J. 
+pkg_log4erl_homepage = https://github.com/ahmednawras/log4erl +pkg_log4erl_fetch = git +pkg_log4erl_repo = https://github.com/ahmednawras/log4erl +pkg_log4erl_commit = master + +PACKAGES += lol +pkg_lol_name = lol +pkg_lol_description = Lisp on erLang, and programming is fun again +pkg_lol_homepage = https://github.com/b0oh/lol +pkg_lol_fetch = git +pkg_lol_repo = https://github.com/b0oh/lol +pkg_lol_commit = master + +PACKAGES += lucid +pkg_lucid_name = lucid +pkg_lucid_description = HTTP/2 server written in Erlang +pkg_lucid_homepage = https://github.com/tatsuhiro-t/lucid +pkg_lucid_fetch = git +pkg_lucid_repo = https://github.com/tatsuhiro-t/lucid +pkg_lucid_commit = master + +PACKAGES += luerl +pkg_luerl_name = luerl +pkg_luerl_description = Lua in Erlang +pkg_luerl_homepage = https://github.com/rvirding/luerl +pkg_luerl_fetch = git +pkg_luerl_repo = https://github.com/rvirding/luerl +pkg_luerl_commit = develop + +PACKAGES += luwak +pkg_luwak_name = luwak +pkg_luwak_description = Large-object storage interface for Riak +pkg_luwak_homepage = https://github.com/basho/luwak +pkg_luwak_fetch = git +pkg_luwak_repo = https://github.com/basho/luwak +pkg_luwak_commit = master + +PACKAGES += lux +pkg_lux_name = lux +pkg_lux_description = Lux (LUcid eXpect scripting) simplifies test automation and provides an Expect-style execution of commands +pkg_lux_homepage = https://github.com/hawk/lux +pkg_lux_fetch = git +pkg_lux_repo = https://github.com/hawk/lux +pkg_lux_commit = master + +PACKAGES += machi +pkg_machi_name = machi +pkg_machi_description = Machi file store +pkg_machi_homepage = https://github.com/basho/machi +pkg_machi_fetch = git +pkg_machi_repo = https://github.com/basho/machi +pkg_machi_commit = master + +PACKAGES += mad +pkg_mad_name = mad +pkg_mad_description = Small and Fast Rebar Replacement +pkg_mad_homepage = https://github.com/synrc/mad +pkg_mad_fetch = git +pkg_mad_repo = https://github.com/synrc/mad +pkg_mad_commit = master + +PACKAGES += marina 
+pkg_marina_name = marina +pkg_marina_description = Non-blocking Erlang Cassandra CQL3 client +pkg_marina_homepage = https://github.com/lpgauth/marina +pkg_marina_fetch = git +pkg_marina_repo = https://github.com/lpgauth/marina +pkg_marina_commit = master + +PACKAGES += mavg +pkg_mavg_name = mavg +pkg_mavg_description = Erlang :: Exponential moving average library +pkg_mavg_homepage = https://github.com/EchoTeam/mavg +pkg_mavg_fetch = git +pkg_mavg_repo = https://github.com/EchoTeam/mavg +pkg_mavg_commit = master + +PACKAGES += mc_erl +pkg_mc_erl_name = mc_erl +pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. +pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl +pkg_mc_erl_fetch = git +pkg_mc_erl_repo = https://github.com/clonejo/mc-erl +pkg_mc_erl_commit = master + +PACKAGES += mcd +pkg_mcd_name = mcd +pkg_mcd_description = Fast memcached protocol client in pure Erlang +pkg_mcd_homepage = https://github.com/EchoTeam/mcd +pkg_mcd_fetch = git +pkg_mcd_repo = https://github.com/EchoTeam/mcd +pkg_mcd_commit = master + +PACKAGES += mcerlang +pkg_mcerlang_name = mcerlang +pkg_mcerlang_description = The McErlang model checker for Erlang +pkg_mcerlang_homepage = https://github.com/fredlund/McErlang +pkg_mcerlang_fetch = git +pkg_mcerlang_repo = https://github.com/fredlund/McErlang +pkg_mcerlang_commit = master + +PACKAGES += meck +pkg_meck_name = meck +pkg_meck_description = A mocking library for Erlang +pkg_meck_homepage = https://github.com/eproxus/meck +pkg_meck_fetch = git +pkg_meck_repo = https://github.com/eproxus/meck +pkg_meck_commit = master + +PACKAGES += mekao +pkg_mekao_name = mekao +pkg_mekao_description = SQL constructor +pkg_mekao_homepage = https://github.com/ddosia/mekao +pkg_mekao_fetch = git +pkg_mekao_repo = https://github.com/ddosia/mekao +pkg_mekao_commit = master + +PACKAGES += memo +pkg_memo_name = memo +pkg_memo_description = Erlang memoization server +pkg_memo_homepage = https://github.com/tuncer/memo 
+pkg_memo_fetch = git +pkg_memo_repo = https://github.com/tuncer/memo +pkg_memo_commit = master + +PACKAGES += merge_index +pkg_merge_index_name = merge_index +pkg_merge_index_description = MergeIndex is an Erlang library for storing ordered sets on disk. It is very similar to an SSTable (in Google's Bigtable) or an HFile (in Hadoop). +pkg_merge_index_homepage = https://github.com/basho/merge_index +pkg_merge_index_fetch = git +pkg_merge_index_repo = https://github.com/basho/merge_index +pkg_merge_index_commit = master + +PACKAGES += merl +pkg_merl_name = merl +pkg_merl_description = Metaprogramming in Erlang +pkg_merl_homepage = https://github.com/richcarl/merl +pkg_merl_fetch = git +pkg_merl_repo = https://github.com/richcarl/merl +pkg_merl_commit = master + +PACKAGES += mimetypes +pkg_mimetypes_name = mimetypes +pkg_mimetypes_description = Erlang MIME types library +pkg_mimetypes_homepage = https://github.com/spawngrid/mimetypes +pkg_mimetypes_fetch = git +pkg_mimetypes_repo = https://github.com/spawngrid/mimetypes +pkg_mimetypes_commit = master + +PACKAGES += mixer +pkg_mixer_name = mixer +pkg_mixer_description = Mix in functions from other modules +pkg_mixer_homepage = https://github.com/chef/mixer +pkg_mixer_fetch = git +pkg_mixer_repo = https://github.com/chef/mixer +pkg_mixer_commit = master + +PACKAGES += mochiweb +pkg_mochiweb_name = mochiweb +pkg_mochiweb_description = MochiWeb is an Erlang library for building lightweight HTTP servers. 
+pkg_mochiweb_homepage = https://github.com/mochi/mochiweb +pkg_mochiweb_fetch = git +pkg_mochiweb_repo = https://github.com/mochi/mochiweb +pkg_mochiweb_commit = master + +PACKAGES += mochiweb_xpath +pkg_mochiweb_xpath_name = mochiweb_xpath +pkg_mochiweb_xpath_description = XPath support for mochiweb's html parser +pkg_mochiweb_xpath_homepage = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_fetch = git +pkg_mochiweb_xpath_repo = https://github.com/retnuh/mochiweb_xpath +pkg_mochiweb_xpath_commit = master + +PACKAGES += mockgyver +pkg_mockgyver_name = mockgyver +pkg_mockgyver_description = A mocking library for Erlang +pkg_mockgyver_homepage = https://github.com/klajo/mockgyver +pkg_mockgyver_fetch = git +pkg_mockgyver_repo = https://github.com/klajo/mockgyver +pkg_mockgyver_commit = master + +PACKAGES += modlib +pkg_modlib_name = modlib +pkg_modlib_description = Web framework based on Erlang's inets httpd +pkg_modlib_homepage = https://github.com/gar1t/modlib +pkg_modlib_fetch = git +pkg_modlib_repo = https://github.com/gar1t/modlib +pkg_modlib_commit = master + +PACKAGES += mongodb +pkg_mongodb_name = mongodb +pkg_mongodb_description = MongoDB driver for Erlang +pkg_mongodb_homepage = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_fetch = git +pkg_mongodb_repo = https://github.com/comtihon/mongodb-erlang +pkg_mongodb_commit = master + +PACKAGES += mongooseim +pkg_mongooseim_name = mongooseim +pkg_mongooseim_description = Jabber / XMPP server with focus on performance and scalability, by Erlang Solutions +pkg_mongooseim_homepage = https://www.erlang-solutions.com/products/mongooseim-massively-scalable-ejabberd-platform +pkg_mongooseim_fetch = git +pkg_mongooseim_repo = https://github.com/esl/MongooseIM +pkg_mongooseim_commit = master + +PACKAGES += moyo +pkg_moyo_name = moyo +pkg_moyo_description = Erlang utility functions library +pkg_moyo_homepage = https://github.com/dwango/moyo +pkg_moyo_fetch = git +pkg_moyo_repo = 
https://github.com/dwango/moyo +pkg_moyo_commit = master + +PACKAGES += msgpack +pkg_msgpack_name = msgpack +pkg_msgpack_description = MessagePack (de)serializer implementation for Erlang +pkg_msgpack_homepage = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_fetch = git +pkg_msgpack_repo = https://github.com/msgpack/msgpack-erlang +pkg_msgpack_commit = master + +PACKAGES += mu2 +pkg_mu2_name = mu2 +pkg_mu2_description = Erlang mutation testing tool +pkg_mu2_homepage = https://github.com/ramsay-t/mu2 +pkg_mu2_fetch = git +pkg_mu2_repo = https://github.com/ramsay-t/mu2 +pkg_mu2_commit = master + +PACKAGES += mustache +pkg_mustache_name = mustache +pkg_mustache_description = Mustache template engine for Erlang. +pkg_mustache_homepage = https://github.com/mojombo/mustache.erl +pkg_mustache_fetch = git +pkg_mustache_repo = https://github.com/mojombo/mustache.erl +pkg_mustache_commit = master + +PACKAGES += myproto +pkg_myproto_name = myproto +pkg_myproto_description = MySQL Server Protocol in Erlang +pkg_myproto_homepage = https://github.com/altenwald/myproto +pkg_myproto_fetch = git +pkg_myproto_repo = https://github.com/altenwald/myproto +pkg_myproto_commit = master + +PACKAGES += mysql +pkg_mysql_name = mysql +pkg_mysql_description = Erlang MySQL Driver (from code.google.com) +pkg_mysql_homepage = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_fetch = git +pkg_mysql_repo = https://github.com/dizzyd/erlang-mysql-driver +pkg_mysql_commit = master + +PACKAGES += n2o +pkg_n2o_name = n2o +pkg_n2o_description = WebSocket Application Server +pkg_n2o_homepage = https://github.com/5HT/n2o +pkg_n2o_fetch = git +pkg_n2o_repo = https://github.com/5HT/n2o +pkg_n2o_commit = master + +PACKAGES += nat_upnp +pkg_nat_upnp_name = nat_upnp +pkg_nat_upnp_description = Erlang library to map your internal port to an external using UPnP IGD +pkg_nat_upnp_homepage = https://github.com/benoitc/nat_upnp +pkg_nat_upnp_fetch = git +pkg_nat_upnp_repo = 
https://github.com/benoitc/nat_upnp +pkg_nat_upnp_commit = master + +PACKAGES += neo4j +pkg_neo4j_name = neo4j +pkg_neo4j_description = Erlang client library for Neo4J. +pkg_neo4j_homepage = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_fetch = git +pkg_neo4j_repo = https://github.com/dmitriid/neo4j-erlang +pkg_neo4j_commit = master + +PACKAGES += neotoma +pkg_neotoma_name = neotoma +pkg_neotoma_description = Erlang library and packrat parser-generator for parsing expression grammars. +pkg_neotoma_homepage = https://github.com/seancribbs/neotoma +pkg_neotoma_fetch = git +pkg_neotoma_repo = https://github.com/seancribbs/neotoma +pkg_neotoma_commit = master + +PACKAGES += newrelic +pkg_newrelic_name = newrelic +pkg_newrelic_description = Erlang library for sending metrics to New Relic +pkg_newrelic_homepage = https://github.com/wooga/newrelic-erlang +pkg_newrelic_fetch = git +pkg_newrelic_repo = https://github.com/wooga/newrelic-erlang +pkg_newrelic_commit = master + +PACKAGES += nifty +pkg_nifty_name = nifty +pkg_nifty_description = Erlang NIF wrapper generator +pkg_nifty_homepage = https://github.com/parapluu/nifty +pkg_nifty_fetch = git +pkg_nifty_repo = https://github.com/parapluu/nifty +pkg_nifty_commit = master + +PACKAGES += nitrogen_core +pkg_nitrogen_core_name = nitrogen_core +pkg_nitrogen_core_description = The core Nitrogen library. 
+pkg_nitrogen_core_homepage = http://nitrogenproject.com/ +pkg_nitrogen_core_fetch = git +pkg_nitrogen_core_repo = https://github.com/nitrogen/nitrogen_core +pkg_nitrogen_core_commit = master + +PACKAGES += nkbase +pkg_nkbase_name = nkbase +pkg_nkbase_description = NkBASE distributed database +pkg_nkbase_homepage = https://github.com/Nekso/nkbase +pkg_nkbase_fetch = git +pkg_nkbase_repo = https://github.com/Nekso/nkbase +pkg_nkbase_commit = develop + +PACKAGES += nkdocker +pkg_nkdocker_name = nkdocker +pkg_nkdocker_description = Erlang Docker client +pkg_nkdocker_homepage = https://github.com/Nekso/nkdocker +pkg_nkdocker_fetch = git +pkg_nkdocker_repo = https://github.com/Nekso/nkdocker +pkg_nkdocker_commit = master + +PACKAGES += nkpacket +pkg_nkpacket_name = nkpacket +pkg_nkpacket_description = Generic Erlang transport layer +pkg_nkpacket_homepage = https://github.com/Nekso/nkpacket +pkg_nkpacket_fetch = git +pkg_nkpacket_repo = https://github.com/Nekso/nkpacket +pkg_nkpacket_commit = master + +PACKAGES += nksip +pkg_nksip_name = nksip +pkg_nksip_description = Erlang SIP application server +pkg_nksip_homepage = https://github.com/kalta/nksip +pkg_nksip_fetch = git +pkg_nksip_repo = https://github.com/kalta/nksip +pkg_nksip_commit = master + +PACKAGES += nodefinder +pkg_nodefinder_name = nodefinder +pkg_nodefinder_description = automatic node discovery via UDP multicast +pkg_nodefinder_homepage = https://github.com/erlanger/nodefinder +pkg_nodefinder_fetch = git +pkg_nodefinder_repo = https://github.com/okeuday/nodefinder +pkg_nodefinder_commit = master + +PACKAGES += nprocreg +pkg_nprocreg_name = nprocreg +pkg_nprocreg_description = Minimal Distributed Erlang Process Registry +pkg_nprocreg_homepage = http://nitrogenproject.com/ +pkg_nprocreg_fetch = git +pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg +pkg_nprocreg_commit = master + +PACKAGES += oauth +pkg_oauth_name = oauth +pkg_oauth_description = An Erlang OAuth 1.0 implementation +pkg_oauth_homepage 
= https://github.com/tim/erlang-oauth +pkg_oauth_fetch = git +pkg_oauth_repo = https://github.com/tim/erlang-oauth +pkg_oauth_commit = master + +PACKAGES += oauth2 +pkg_oauth2_name = oauth2 +pkg_oauth2_description = Erlang Oauth2 implementation +pkg_oauth2_homepage = https://github.com/kivra/oauth2 +pkg_oauth2_fetch = git +pkg_oauth2_repo = https://github.com/kivra/oauth2 +pkg_oauth2_commit = master + +PACKAGES += oauth2c +pkg_oauth2c_name = oauth2c +pkg_oauth2c_description = Erlang OAuth2 Client +pkg_oauth2c_homepage = https://github.com/kivra/oauth2_client +pkg_oauth2c_fetch = git +pkg_oauth2c_repo = https://github.com/kivra/oauth2_client +pkg_oauth2c_commit = master + +PACKAGES += octopus +pkg_octopus_name = octopus +pkg_octopus_description = Small and flexible pool manager written in Erlang +pkg_octopus_homepage = https://github.com/erlangbureau/octopus +pkg_octopus_fetch = git +pkg_octopus_repo = https://github.com/erlangbureau/octopus +pkg_octopus_commit = 1.0.0 + +PACKAGES += of_protocol +pkg_of_protocol_name = of_protocol +pkg_of_protocol_description = OpenFlow Protocol Library for Erlang +pkg_of_protocol_homepage = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_fetch = git +pkg_of_protocol_repo = https://github.com/FlowForwarding/of_protocol +pkg_of_protocol_commit = master + +PACKAGES += opencouch +pkg_opencouch_name = couch +pkg_opencouch_description = An embeddable document oriented database compatible with Apache CouchDB +pkg_opencouch_homepage = https://github.com/benoitc/opencouch +pkg_opencouch_fetch = git +pkg_opencouch_repo = https://github.com/benoitc/opencouch +pkg_opencouch_commit = master + +PACKAGES += openflow +pkg_openflow_name = openflow +pkg_openflow_description = An OpenFlow controller written in pure erlang +pkg_openflow_homepage = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_fetch = git +pkg_openflow_repo = https://github.com/renatoaguiar/erlang-openflow +pkg_openflow_commit = master + +PACKAGES += 
openid +pkg_openid_name = openid +pkg_openid_description = Erlang OpenID +pkg_openid_homepage = https://github.com/brendonh/erl_openid +pkg_openid_fetch = git +pkg_openid_repo = https://github.com/brendonh/erl_openid +pkg_openid_commit = master + +PACKAGES += openpoker +pkg_openpoker_name = openpoker +pkg_openpoker_description = Genesis Texas hold'em Game Server +pkg_openpoker_homepage = https://github.com/hpyhacking/openpoker +pkg_openpoker_fetch = git +pkg_openpoker_repo = https://github.com/hpyhacking/openpoker +pkg_openpoker_commit = master + +PACKAGES += pal +pkg_pal_name = pal +pkg_pal_description = Pragmatic Authentication Library +pkg_pal_homepage = https://github.com/manifest/pal +pkg_pal_fetch = git +pkg_pal_repo = https://github.com/manifest/pal +pkg_pal_commit = master + +PACKAGES += parse_trans +pkg_parse_trans_name = parse_trans +pkg_parse_trans_description = Parse transform utilities for Erlang +pkg_parse_trans_homepage = https://github.com/uwiger/parse_trans +pkg_parse_trans_fetch = git +pkg_parse_trans_repo = https://github.com/uwiger/parse_trans +pkg_parse_trans_commit = master + +PACKAGES += parsexml +pkg_parsexml_name = parsexml +pkg_parsexml_description = Simple DOM XML parser with convenient and very simple API +pkg_parsexml_homepage = https://github.com/maxlapshin/parsexml +pkg_parsexml_fetch = git +pkg_parsexml_repo = https://github.com/maxlapshin/parsexml +pkg_parsexml_commit = master + +PACKAGES += pegjs +pkg_pegjs_name = pegjs +pkg_pegjs_description = An implementation of PEG.js grammar for Erlang. 
+pkg_pegjs_homepage = https://github.com/dmitriid/pegjs +pkg_pegjs_fetch = git +pkg_pegjs_repo = https://github.com/dmitriid/pegjs +pkg_pegjs_commit = 0.3 + +PACKAGES += percept2 +pkg_percept2_name = percept2 +pkg_percept2_description = Concurrent profiling tool for Erlang +pkg_percept2_homepage = https://github.com/huiqing/percept2 +pkg_percept2_fetch = git +pkg_percept2_repo = https://github.com/huiqing/percept2 +pkg_percept2_commit = master + +PACKAGES += pgsql +pkg_pgsql_name = pgsql +pkg_pgsql_description = Erlang PostgreSQL driver +pkg_pgsql_homepage = https://github.com/semiocast/pgsql +pkg_pgsql_fetch = git +pkg_pgsql_repo = https://github.com/semiocast/pgsql +pkg_pgsql_commit = master + +PACKAGES += pkgx +pkg_pkgx_name = pkgx +pkg_pkgx_description = Build .deb packages from Erlang releases +pkg_pkgx_homepage = https://github.com/arjan/pkgx +pkg_pkgx_fetch = git +pkg_pkgx_repo = https://github.com/arjan/pkgx +pkg_pkgx_commit = master + +PACKAGES += pkt +pkg_pkt_name = pkt +pkg_pkt_description = Erlang network protocol library +pkg_pkt_homepage = https://github.com/msantos/pkt +pkg_pkt_fetch = git +pkg_pkt_repo = https://github.com/msantos/pkt +pkg_pkt_commit = master + +PACKAGES += plain_fsm +pkg_plain_fsm_name = plain_fsm +pkg_plain_fsm_description = A behaviour/support library for writing plain Erlang FSMs. 
+pkg_plain_fsm_homepage = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_fetch = git +pkg_plain_fsm_repo = https://github.com/uwiger/plain_fsm +pkg_plain_fsm_commit = master + +PACKAGES += plumtree +pkg_plumtree_name = plumtree +pkg_plumtree_description = Epidemic Broadcast Trees +pkg_plumtree_homepage = https://github.com/helium/plumtree +pkg_plumtree_fetch = git +pkg_plumtree_repo = https://github.com/helium/plumtree +pkg_plumtree_commit = master + +PACKAGES += pmod_transform +pkg_pmod_transform_name = pmod_transform +pkg_pmod_transform_description = Parse transform for parameterized modules +pkg_pmod_transform_homepage = https://github.com/erlang/pmod_transform +pkg_pmod_transform_fetch = git +pkg_pmod_transform_repo = https://github.com/erlang/pmod_transform +pkg_pmod_transform_commit = master + +PACKAGES += pobox +pkg_pobox_name = pobox +pkg_pobox_description = External buffer processes to protect against mailbox overflow in Erlang +pkg_pobox_homepage = https://github.com/ferd/pobox +pkg_pobox_fetch = git +pkg_pobox_repo = https://github.com/ferd/pobox +pkg_pobox_commit = master + +PACKAGES += ponos +pkg_ponos_name = ponos +pkg_ponos_description = ponos is a simple yet powerful load generator written in erlang +pkg_ponos_homepage = https://github.com/klarna/ponos +pkg_ponos_fetch = git +pkg_ponos_repo = https://github.com/klarna/ponos +pkg_ponos_commit = master + +PACKAGES += poolboy +pkg_poolboy_name = poolboy +pkg_poolboy_description = A hunky Erlang worker pool factory +pkg_poolboy_homepage = https://github.com/devinus/poolboy +pkg_poolboy_fetch = git +pkg_poolboy_repo = https://github.com/devinus/poolboy +pkg_poolboy_commit = master + +PACKAGES += pooler +pkg_pooler_name = pooler +pkg_pooler_description = An OTP Process Pool Application +pkg_pooler_homepage = https://github.com/seth/pooler +pkg_pooler_fetch = git +pkg_pooler_repo = https://github.com/seth/pooler +pkg_pooler_commit = master + +PACKAGES += pqueue +pkg_pqueue_name = pqueue 
+pkg_pqueue_description = Erlang Priority Queues +pkg_pqueue_homepage = https://github.com/okeuday/pqueue +pkg_pqueue_fetch = git +pkg_pqueue_repo = https://github.com/okeuday/pqueue +pkg_pqueue_commit = master + +PACKAGES += procket +pkg_procket_name = procket +pkg_procket_description = Erlang interface to low level socket operations +pkg_procket_homepage = http://blog.listincomprehension.com/search/label/procket +pkg_procket_fetch = git +pkg_procket_repo = https://github.com/msantos/procket +pkg_procket_commit = master + +PACKAGES += prop +pkg_prop_name = prop +pkg_prop_description = An Erlang code scaffolding and generator system. +pkg_prop_homepage = https://github.com/nuex/prop +pkg_prop_fetch = git +pkg_prop_repo = https://github.com/nuex/prop +pkg_prop_commit = master + +PACKAGES += proper +pkg_proper_name = proper +pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. +pkg_proper_homepage = http://proper.softlab.ntua.gr +pkg_proper_fetch = git +pkg_proper_repo = https://github.com/manopapad/proper +pkg_proper_commit = master + +PACKAGES += props +pkg_props_name = props +pkg_props_description = Property structure library +pkg_props_homepage = https://github.com/greyarea/props +pkg_props_fetch = git +pkg_props_repo = https://github.com/greyarea/props +pkg_props_commit = master + +PACKAGES += protobuffs +pkg_protobuffs_name = protobuffs +pkg_protobuffs_description = An implementation of Google's Protocol Buffers for Erlang, based on ngerakines/erlang_protobuffs. +pkg_protobuffs_homepage = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_fetch = git +pkg_protobuffs_repo = https://github.com/basho/erlang_protobuffs +pkg_protobuffs_commit = master + +PACKAGES += psycho +pkg_psycho_name = psycho +pkg_psycho_description = HTTP server that provides a WSGI-like interface for applications and middleware. 
+pkg_psycho_homepage = https://github.com/gar1t/psycho +pkg_psycho_fetch = git +pkg_psycho_repo = https://github.com/gar1t/psycho +pkg_psycho_commit = master + +PACKAGES += purity +pkg_purity_name = purity +pkg_purity_description = A side-effect analyzer for Erlang +pkg_purity_homepage = https://github.com/mpitid/purity +pkg_purity_fetch = git +pkg_purity_repo = https://github.com/mpitid/purity +pkg_purity_commit = master + +PACKAGES += push_service +pkg_push_service_name = push_service +pkg_push_service_description = Push service +pkg_push_service_homepage = https://github.com/hairyhum/push_service +pkg_push_service_fetch = git +pkg_push_service_repo = https://github.com/hairyhum/push_service +pkg_push_service_commit = master + +PACKAGES += qdate +pkg_qdate_name = qdate +pkg_qdate_description = Date, time, and timezone parsing, formatting, and conversion for Erlang. +pkg_qdate_homepage = https://github.com/choptastic/qdate +pkg_qdate_fetch = git +pkg_qdate_repo = https://github.com/choptastic/qdate +pkg_qdate_commit = 0.4.0 + +PACKAGES += qrcode +pkg_qrcode_name = qrcode +pkg_qrcode_description = QR Code encoder in Erlang +pkg_qrcode_homepage = https://github.com/komone/qrcode +pkg_qrcode_fetch = git +pkg_qrcode_repo = https://github.com/komone/qrcode +pkg_qrcode_commit = master + +PACKAGES += quest +pkg_quest_name = quest +pkg_quest_description = Learn Erlang through this set of challenges. An interactive system for getting to know Erlang. 
+pkg_quest_homepage = https://github.com/eriksoe/ErlangQuest +pkg_quest_fetch = git +pkg_quest_repo = https://github.com/eriksoe/ErlangQuest +pkg_quest_commit = master + +PACKAGES += quickrand +pkg_quickrand_name = quickrand +pkg_quickrand_description = Quick Erlang Random Number Generation +pkg_quickrand_homepage = https://github.com/okeuday/quickrand +pkg_quickrand_fetch = git +pkg_quickrand_repo = https://github.com/okeuday/quickrand +pkg_quickrand_commit = master + +PACKAGES += rabbit +pkg_rabbit_name = rabbit +pkg_rabbit_description = RabbitMQ Server +pkg_rabbit_homepage = https://www.rabbitmq.com/ +pkg_rabbit_fetch = git +pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git +pkg_rabbit_commit = master + +PACKAGES += rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak +pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak +pkg_rabbit_exchange_type_riak_homepage = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_fetch = git +pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange +pkg_rabbit_exchange_type_riak_commit = master + +PACKAGES += rack +pkg_rack_name = rack +pkg_rack_description = Rack handler for erlang +pkg_rack_homepage = https://github.com/erlyvideo/rack +pkg_rack_fetch = git +pkg_rack_repo = https://github.com/erlyvideo/rack +pkg_rack_commit = master + +PACKAGES += radierl +pkg_radierl_name = radierl +pkg_radierl_description = RADIUS protocol stack implemented in Erlang. 
+pkg_radierl_homepage = https://github.com/vances/radierl +pkg_radierl_fetch = git +pkg_radierl_repo = https://github.com/vances/radierl +pkg_radierl_commit = master + +PACKAGES += rafter +pkg_rafter_name = rafter +pkg_rafter_description = An Erlang library application which implements the Raft consensus protocol +pkg_rafter_homepage = https://github.com/andrewjstone/rafter +pkg_rafter_fetch = git +pkg_rafter_repo = https://github.com/andrewjstone/rafter +pkg_rafter_commit = master + +PACKAGES += ranch +pkg_ranch_name = ranch +pkg_ranch_description = Socket acceptor pool for TCP protocols. +pkg_ranch_homepage = http://ninenines.eu +pkg_ranch_fetch = git +pkg_ranch_repo = https://github.com/ninenines/ranch +pkg_ranch_commit = 1.1.0 + +PACKAGES += rbeacon +pkg_rbeacon_name = rbeacon +pkg_rbeacon_description = LAN discovery and presence in Erlang. +pkg_rbeacon_homepage = https://github.com/refuge/rbeacon +pkg_rbeacon_fetch = git +pkg_rbeacon_repo = https://github.com/refuge/rbeacon +pkg_rbeacon_commit = master + +PACKAGES += rebar +pkg_rebar_name = rebar +pkg_rebar_description = Erlang build tool that makes it easy to compile and test Erlang applications, port drivers and releases. +pkg_rebar_homepage = http://www.rebar3.org +pkg_rebar_fetch = git +pkg_rebar_repo = https://github.com/rebar/rebar3 +pkg_rebar_commit = master + +PACKAGES += rebus +pkg_rebus_name = rebus +pkg_rebus_description = A stupid simple, internal, pub/sub event bus written in- and for Erlang. +pkg_rebus_homepage = https://github.com/olle/rebus +pkg_rebus_fetch = git +pkg_rebus_repo = https://github.com/olle/rebus +pkg_rebus_commit = master + +PACKAGES += rec2json +pkg_rec2json_name = rec2json +pkg_rec2json_description = Compile erlang record definitions into modules to convert them to/from json easily. 
+pkg_rec2json_homepage = https://github.com/lordnull/rec2json +pkg_rec2json_fetch = git +pkg_rec2json_repo = https://github.com/lordnull/rec2json +pkg_rec2json_commit = master + +PACKAGES += recon +pkg_recon_name = recon +pkg_recon_description = Collection of functions and scripts to debug Erlang in production. +pkg_recon_homepage = https://github.com/ferd/recon +pkg_recon_fetch = git +pkg_recon_repo = https://github.com/ferd/recon +pkg_recon_commit = 2.2.1 + +PACKAGES += record_info +pkg_record_info_name = record_info +pkg_record_info_description = Convert between record and proplist +pkg_record_info_homepage = https://github.com/bipthelin/erlang-record_info +pkg_record_info_fetch = git +pkg_record_info_repo = https://github.com/bipthelin/erlang-record_info +pkg_record_info_commit = master + +PACKAGES += redgrid +pkg_redgrid_name = redgrid +pkg_redgrid_description = automatic Erlang node discovery via redis +pkg_redgrid_homepage = https://github.com/jkvor/redgrid +pkg_redgrid_fetch = git +pkg_redgrid_repo = https://github.com/jkvor/redgrid +pkg_redgrid_commit = master + +PACKAGES += redo +pkg_redo_name = redo +pkg_redo_description = pipelined erlang redis client +pkg_redo_homepage = https://github.com/jkvor/redo +pkg_redo_fetch = git +pkg_redo_repo = https://github.com/jkvor/redo +pkg_redo_commit = master + +PACKAGES += reload_mk +pkg_reload_mk_name = reload_mk +pkg_reload_mk_description = Live reload plugin for erlang.mk. 
+pkg_reload_mk_homepage = https://github.com/bullno1/reload.mk +pkg_reload_mk_fetch = git +pkg_reload_mk_repo = https://github.com/bullno1/reload.mk +pkg_reload_mk_commit = master + +PACKAGES += reltool_util +pkg_reltool_util_name = reltool_util +pkg_reltool_util_description = Erlang reltool utility functionality application +pkg_reltool_util_homepage = https://github.com/okeuday/reltool_util +pkg_reltool_util_fetch = git +pkg_reltool_util_repo = https://github.com/okeuday/reltool_util +pkg_reltool_util_commit = master + +PACKAGES += relx +pkg_relx_name = relx +pkg_relx_description = Sane, simple release creation for Erlang +pkg_relx_homepage = https://github.com/erlware/relx +pkg_relx_fetch = git +pkg_relx_repo = https://github.com/erlware/relx +pkg_relx_commit = master + +PACKAGES += resource_discovery +pkg_resource_discovery_name = resource_discovery +pkg_resource_discovery_description = An application used to dynamically discover resources present in an Erlang node cluster. +pkg_resource_discovery_homepage = http://erlware.org/ +pkg_resource_discovery_fetch = git +pkg_resource_discovery_repo = https://github.com/erlware/resource_discovery +pkg_resource_discovery_commit = master + +PACKAGES += restc +pkg_restc_name = restc +pkg_restc_description = Erlang Rest Client +pkg_restc_homepage = https://github.com/kivra/restclient +pkg_restc_fetch = git +pkg_restc_repo = https://github.com/kivra/restclient +pkg_restc_commit = master + +PACKAGES += rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_name = rfc4627_jsonrpc +pkg_rfc4627_jsonrpc_description = Erlang RFC4627 (JSON) codec and JSON-RPC server implementation. +pkg_rfc4627_jsonrpc_homepage = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_fetch = git +pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 +pkg_rfc4627_jsonrpc_commit = master + +PACKAGES += riak_control +pkg_riak_control_name = riak_control +pkg_riak_control_description = Webmachine-based administration interface for Riak. 
+pkg_riak_control_homepage = https://github.com/basho/riak_control +pkg_riak_control_fetch = git +pkg_riak_control_repo = https://github.com/basho/riak_control +pkg_riak_control_commit = master + +PACKAGES += riak_core +pkg_riak_core_name = riak_core +pkg_riak_core_description = Distributed systems infrastructure used by Riak. +pkg_riak_core_homepage = https://github.com/basho/riak_core +pkg_riak_core_fetch = git +pkg_riak_core_repo = https://github.com/basho/riak_core +pkg_riak_core_commit = master + +PACKAGES += riak_dt +pkg_riak_dt_name = riak_dt +pkg_riak_dt_description = Convergent replicated datatypes in Erlang +pkg_riak_dt_homepage = https://github.com/basho/riak_dt +pkg_riak_dt_fetch = git +pkg_riak_dt_repo = https://github.com/basho/riak_dt +pkg_riak_dt_commit = master + +PACKAGES += riak_ensemble +pkg_riak_ensemble_name = riak_ensemble +pkg_riak_ensemble_description = Multi-Paxos framework in Erlang +pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_fetch = git +pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble +pkg_riak_ensemble_commit = master + +PACKAGES += riak_kv +pkg_riak_kv_name = riak_kv +pkg_riak_kv_description = Riak Key/Value Store +pkg_riak_kv_homepage = https://github.com/basho/riak_kv +pkg_riak_kv_fetch = git +pkg_riak_kv_repo = https://github.com/basho/riak_kv +pkg_riak_kv_commit = master + +PACKAGES += riak_pg +pkg_riak_pg_name = riak_pg +pkg_riak_pg_description = Distributed process groups with riak_core. 
+pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_fetch = git +pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg +pkg_riak_pg_commit = master + +PACKAGES += riak_pipe +pkg_riak_pipe_name = riak_pipe +pkg_riak_pipe_description = Riak Pipelines +pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe +pkg_riak_pipe_fetch = git +pkg_riak_pipe_repo = https://github.com/basho/riak_pipe +pkg_riak_pipe_commit = master + +PACKAGES += riak_sysmon +pkg_riak_sysmon_name = riak_sysmon +pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages +pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_fetch = git +pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon +pkg_riak_sysmon_commit = master + +PACKAGES += riak_test +pkg_riak_test_name = riak_test +pkg_riak_test_description = I'm in your cluster, testing your riaks +pkg_riak_test_homepage = https://github.com/basho/riak_test +pkg_riak_test_fetch = git +pkg_riak_test_repo = https://github.com/basho/riak_test +pkg_riak_test_commit = master + +PACKAGES += riakc +pkg_riakc_name = riakc +pkg_riakc_description = Erlang clients for Riak. 
+pkg_riakc_homepage = https://github.com/basho/riak-erlang-client +pkg_riakc_fetch = git +pkg_riakc_repo = https://github.com/basho/riak-erlang-client +pkg_riakc_commit = master + +PACKAGES += riakhttpc +pkg_riakhttpc_name = riakhttpc +pkg_riakhttpc_description = Riak Erlang client using the HTTP interface +pkg_riakhttpc_homepage = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_fetch = git +pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client +pkg_riakhttpc_commit = master + +PACKAGES += riaknostic +pkg_riaknostic_name = riaknostic +pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap +pkg_riaknostic_homepage = https://github.com/basho/riaknostic +pkg_riaknostic_fetch = git +pkg_riaknostic_repo = https://github.com/basho/riaknostic +pkg_riaknostic_commit = master + +PACKAGES += riakpool +pkg_riakpool_name = riakpool +pkg_riakpool_description = erlang riak client pool +pkg_riakpool_homepage = https://github.com/dweldon/riakpool +pkg_riakpool_fetch = git +pkg_riakpool_repo = https://github.com/dweldon/riakpool +pkg_riakpool_commit = master + +PACKAGES += rivus_cep +pkg_rivus_cep_name = rivus_cep +pkg_rivus_cep_description = Complex event processing in Erlang +pkg_rivus_cep_homepage = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_fetch = git +pkg_rivus_cep_repo = https://github.com/vascokk/rivus_cep +pkg_rivus_cep_commit = master + +PACKAGES += rlimit +pkg_rlimit_name = rlimit +pkg_rlimit_description = Magnus Klaar's rate limiter code from etorrent +pkg_rlimit_homepage = https://github.com/jlouis/rlimit +pkg_rlimit_fetch = git +pkg_rlimit_repo = https://github.com/jlouis/rlimit +pkg_rlimit_commit = master + +PACKAGES += safetyvalve +pkg_safetyvalve_name = safetyvalve +pkg_safetyvalve_description = A safety valve for your erlang node +pkg_safetyvalve_homepage = https://github.com/jlouis/safetyvalve +pkg_safetyvalve_fetch = git +pkg_safetyvalve_repo = 
https://github.com/jlouis/safetyvalve +pkg_safetyvalve_commit = master + +PACKAGES += seestar +pkg_seestar_name = seestar +pkg_seestar_description = The Erlang client for Cassandra 1.2+ binary protocol +pkg_seestar_homepage = https://github.com/iamaleksey/seestar +pkg_seestar_fetch = git +pkg_seestar_repo = https://github.com/iamaleksey/seestar +pkg_seestar_commit = master + +PACKAGES += service +pkg_service_name = service +pkg_service_description = A minimal Erlang behavior for creating CloudI internal services +pkg_service_homepage = http://cloudi.org/ +pkg_service_fetch = git +pkg_service_repo = https://github.com/CloudI/service +pkg_service_commit = master + +PACKAGES += setup +pkg_setup_name = setup +pkg_setup_description = Generic setup utility for Erlang-based systems +pkg_setup_homepage = https://github.com/uwiger/setup +pkg_setup_fetch = git +pkg_setup_repo = https://github.com/uwiger/setup +pkg_setup_commit = master + +PACKAGES += sext +pkg_sext_name = sext +pkg_sext_description = Sortable Erlang Term Serialization +pkg_sext_homepage = https://github.com/uwiger/sext +pkg_sext_fetch = git +pkg_sext_repo = https://github.com/uwiger/sext +pkg_sext_commit = master + +PACKAGES += sfmt +pkg_sfmt_name = sfmt +pkg_sfmt_description = SFMT pseudo random number generator for Erlang. +pkg_sfmt_homepage = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_fetch = git +pkg_sfmt_repo = https://github.com/jj1bdx/sfmt-erlang +pkg_sfmt_commit = master + +PACKAGES += sgte +pkg_sgte_name = sgte +pkg_sgte_description = A simple Erlang Template Engine +pkg_sgte_homepage = https://github.com/filippo/sgte +pkg_sgte_fetch = git +pkg_sgte_repo = https://github.com/filippo/sgte +pkg_sgte_commit = master + +PACKAGES += sheriff +pkg_sheriff_name = sheriff +pkg_sheriff_description = Parse transform for type based validation. 
+pkg_sheriff_homepage = http://ninenines.eu +pkg_sheriff_fetch = git +pkg_sheriff_repo = https://github.com/extend/sheriff +pkg_sheriff_commit = master + +PACKAGES += shotgun +pkg_shotgun_name = shotgun +pkg_shotgun_description = better than just a gun +pkg_shotgun_homepage = https://github.com/inaka/shotgun +pkg_shotgun_fetch = git +pkg_shotgun_repo = https://github.com/inaka/shotgun +pkg_shotgun_commit = 0.1.0 + +PACKAGES += sidejob +pkg_sidejob_name = sidejob +pkg_sidejob_description = Parallel worker and capacity limiting library for Erlang +pkg_sidejob_homepage = https://github.com/basho/sidejob +pkg_sidejob_fetch = git +pkg_sidejob_repo = https://github.com/basho/sidejob +pkg_sidejob_commit = master + +PACKAGES += sieve +pkg_sieve_name = sieve +pkg_sieve_description = sieve is a simple TCP routing proxy (layer 7) in erlang +pkg_sieve_homepage = https://github.com/benoitc/sieve +pkg_sieve_fetch = git +pkg_sieve_repo = https://github.com/benoitc/sieve +pkg_sieve_commit = master + +PACKAGES += sighandler +pkg_sighandler_name = sighandler +pkg_sighandler_description = Handle UNIX signals in Erlang +pkg_sighandler_homepage = https://github.com/jkingsbery/sighandler +pkg_sighandler_fetch = git +pkg_sighandler_repo = https://github.com/jkingsbery/sighandler +pkg_sighandler_commit = master + +PACKAGES += simhash +pkg_simhash_name = simhash +pkg_simhash_description = Simhashing for Erlang -- hashing algorithm to find near-duplicates in binary data. +pkg_simhash_homepage = https://github.com/ferd/simhash +pkg_simhash_fetch = git +pkg_simhash_repo = https://github.com/ferd/simhash +pkg_simhash_commit = master + +PACKAGES += simple_bridge +pkg_simple_bridge_name = simple_bridge +pkg_simple_bridge_description = A simple, standardized interface library to Erlang HTTP Servers. 
+pkg_simple_bridge_homepage = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_fetch = git +pkg_simple_bridge_repo = https://github.com/nitrogen/simple_bridge +pkg_simple_bridge_commit = master + +PACKAGES += simple_oauth2 +pkg_simple_oauth2_name = simple_oauth2 +pkg_simple_oauth2_description = Simple erlang OAuth2 client module for any http server framework (Google, Facebook, Yandex, Vkontakte are preconfigured) +pkg_simple_oauth2_homepage = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_fetch = git +pkg_simple_oauth2_repo = https://github.com/virtan/simple_oauth2 +pkg_simple_oauth2_commit = master + +PACKAGES += skel +pkg_skel_name = skel +pkg_skel_description = A Streaming Process-based Skeleton Library for Erlang +pkg_skel_homepage = https://github.com/ParaPhrase/skel +pkg_skel_fetch = git +pkg_skel_repo = https://github.com/ParaPhrase/skel +pkg_skel_commit = master + +PACKAGES += smother +pkg_smother_name = smother +pkg_smother_description = Extended code coverage metrics for Erlang. 
+pkg_smother_homepage = https://ramsay-t.github.io/Smother/ +pkg_smother_fetch = git +pkg_smother_repo = https://github.com/ramsay-t/Smother +pkg_smother_commit = master + +PACKAGES += social +pkg_social_name = social +pkg_social_description = Cowboy handler for social login via OAuth2 providers +pkg_social_homepage = https://github.com/dvv/social +pkg_social_fetch = git +pkg_social_repo = https://github.com/dvv/social +pkg_social_commit = master + +PACKAGES += spapi_router +pkg_spapi_router_name = spapi_router +pkg_spapi_router_description = Partially-connected Erlang clustering +pkg_spapi_router_homepage = https://github.com/spilgames/spapi-router +pkg_spapi_router_fetch = git +pkg_spapi_router_repo = https://github.com/spilgames/spapi-router +pkg_spapi_router_commit = master + +PACKAGES += sqerl +pkg_sqerl_name = sqerl +pkg_sqerl_description = An Erlang-flavoured SQL DSL +pkg_sqerl_homepage = https://github.com/hairyhum/sqerl +pkg_sqerl_fetch = git +pkg_sqerl_repo = https://github.com/hairyhum/sqerl +pkg_sqerl_commit = master + +PACKAGES += srly +pkg_srly_name = srly +pkg_srly_description = Native Erlang Unix serial interface +pkg_srly_homepage = https://github.com/msantos/srly +pkg_srly_fetch = git +pkg_srly_repo = https://github.com/msantos/srly +pkg_srly_commit = master + +PACKAGES += sshrpc +pkg_sshrpc_name = sshrpc +pkg_sshrpc_description = Erlang SSH RPC module (experimental) +pkg_sshrpc_homepage = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_fetch = git +pkg_sshrpc_repo = https://github.com/jj1bdx/sshrpc +pkg_sshrpc_commit = master + +PACKAGES += stable +pkg_stable_name = stable +pkg_stable_description = Library of assorted helpers for Cowboy web server. +pkg_stable_homepage = https://github.com/dvv/stable +pkg_stable_fetch = git +pkg_stable_repo = https://github.com/dvv/stable +pkg_stable_commit = master + +PACKAGES += statebox +pkg_statebox_name = statebox +pkg_statebox_description = Erlang state monad with merge/conflict-resolution capabilities. 
Useful for Riak. +pkg_statebox_homepage = https://github.com/mochi/statebox +pkg_statebox_fetch = git +pkg_statebox_repo = https://github.com/mochi/statebox +pkg_statebox_commit = master + +PACKAGES += statebox_riak +pkg_statebox_riak_name = statebox_riak +pkg_statebox_riak_description = Convenience library that makes it easier to use statebox with riak, extracted from best practices in our production code at Mochi Media. +pkg_statebox_riak_homepage = https://github.com/mochi/statebox_riak +pkg_statebox_riak_fetch = git +pkg_statebox_riak_repo = https://github.com/mochi/statebox_riak +pkg_statebox_riak_commit = master + +PACKAGES += statman +pkg_statman_name = statman +pkg_statman_description = Efficiently collect massive volumes of metrics inside the Erlang VM +pkg_statman_homepage = https://github.com/knutin/statman +pkg_statman_fetch = git +pkg_statman_repo = https://github.com/knutin/statman +pkg_statman_commit = master + +PACKAGES += statsderl +pkg_statsderl_name = statsderl +pkg_statsderl_description = StatsD client (erlang) +pkg_statsderl_homepage = https://github.com/lpgauth/statsderl +pkg_statsderl_fetch = git +pkg_statsderl_repo = https://github.com/lpgauth/statsderl +pkg_statsderl_commit = master + +PACKAGES += stdinout_pool +pkg_stdinout_pool_name = stdinout_pool +pkg_stdinout_pool_description = stdinout_pool : stuff goes in, stuff goes out. there's never any miscommunication. 
+pkg_stdinout_pool_homepage = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_fetch = git +pkg_stdinout_pool_repo = https://github.com/mattsta/erlang-stdinout-pool +pkg_stdinout_pool_commit = master + +PACKAGES += stockdb +pkg_stockdb_name = stockdb +pkg_stockdb_description = Database for storing Stock Exchange quotes in erlang +pkg_stockdb_homepage = https://github.com/maxlapshin/stockdb +pkg_stockdb_fetch = git +pkg_stockdb_repo = https://github.com/maxlapshin/stockdb +pkg_stockdb_commit = master + +PACKAGES += stripe +pkg_stripe_name = stripe +pkg_stripe_description = Erlang interface to the stripe.com API +pkg_stripe_homepage = https://github.com/mattsta/stripe-erlang +pkg_stripe_fetch = git +pkg_stripe_repo = https://github.com/mattsta/stripe-erlang +pkg_stripe_commit = v1 + +PACKAGES += surrogate +pkg_surrogate_name = surrogate +pkg_surrogate_description = Proxy server written in erlang. Supports reverse proxy load balancing and forward proxy with http (including CONNECT), socks4, socks5, and transparent proxy modes. +pkg_surrogate_homepage = https://github.com/skruger/Surrogate +pkg_surrogate_fetch = git +pkg_surrogate_repo = https://github.com/skruger/Surrogate +pkg_surrogate_commit = master + +PACKAGES += swab +pkg_swab_name = swab +pkg_swab_description = General purpose buffer handling module +pkg_swab_homepage = https://github.com/crownedgrouse/swab +pkg_swab_fetch = git +pkg_swab_repo = https://github.com/crownedgrouse/swab +pkg_swab_commit = master + +PACKAGES += swarm +pkg_swarm_name = swarm +pkg_swarm_description = Fast and simple acceptor pool for Erlang +pkg_swarm_homepage = https://github.com/jeremey/swarm +pkg_swarm_fetch = git +pkg_swarm_repo = https://github.com/jeremey/swarm +pkg_swarm_commit = master + +PACKAGES += switchboard +pkg_switchboard_name = switchboard +pkg_switchboard_description = A framework for processing email using worker plugins. 
+pkg_switchboard_homepage = https://github.com/thusfresh/switchboard +pkg_switchboard_fetch = git +pkg_switchboard_repo = https://github.com/thusfresh/switchboard +pkg_switchboard_commit = master + +PACKAGES += syn +pkg_syn_name = syn +pkg_syn_description = A global process registry for Erlang. +pkg_syn_homepage = https://github.com/ostinelli/syn +pkg_syn_fetch = git +pkg_syn_repo = https://github.com/ostinelli/syn +pkg_syn_commit = master + +PACKAGES += sync +pkg_sync_name = sync +pkg_sync_description = On-the-fly recompiling and reloading in Erlang. +pkg_sync_homepage = https://github.com/rustyio/sync +pkg_sync_fetch = git +pkg_sync_repo = https://github.com/rustyio/sync +pkg_sync_commit = master + +PACKAGES += syntaxerl +pkg_syntaxerl_name = syntaxerl +pkg_syntaxerl_description = Syntax checker for Erlang +pkg_syntaxerl_homepage = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_fetch = git +pkg_syntaxerl_repo = https://github.com/ten0s/syntaxerl +pkg_syntaxerl_commit = master + +PACKAGES += syslog +pkg_syslog_name = syslog +pkg_syslog_description = Erlang port driver for interacting with syslog via syslog(3) +pkg_syslog_homepage = https://github.com/Vagabond/erlang-syslog +pkg_syslog_fetch = git +pkg_syslog_repo = https://github.com/Vagabond/erlang-syslog +pkg_syslog_commit = master + +PACKAGES += taskforce +pkg_taskforce_name = taskforce +pkg_taskforce_description = Erlang worker pools for controlled parallelisation of arbitrary tasks. 
+pkg_taskforce_homepage = https://github.com/g-andrade/taskforce +pkg_taskforce_fetch = git +pkg_taskforce_repo = https://github.com/g-andrade/taskforce +pkg_taskforce_commit = master + +PACKAGES += tddreloader +pkg_tddreloader_name = tddreloader +pkg_tddreloader_description = Shell utility for recompiling, reloading, and testing code as it changes +pkg_tddreloader_homepage = https://github.com/version2beta/tddreloader +pkg_tddreloader_fetch = git +pkg_tddreloader_repo = https://github.com/version2beta/tddreloader +pkg_tddreloader_commit = master + +PACKAGES += tempo +pkg_tempo_name = tempo +pkg_tempo_description = NIF-based date and time parsing and formatting for Erlang. +pkg_tempo_homepage = https://github.com/selectel/tempo +pkg_tempo_fetch = git +pkg_tempo_repo = https://github.com/selectel/tempo +pkg_tempo_commit = master + +PACKAGES += ticktick +pkg_ticktick_name = ticktick +pkg_ticktick_description = Ticktick is an id generator for message service. +pkg_ticktick_homepage = https://github.com/ericliang/ticktick +pkg_ticktick_fetch = git +pkg_ticktick_repo = https://github.com/ericliang/ticktick +pkg_ticktick_commit = master + +PACKAGES += tinymq +pkg_tinymq_name = tinymq +pkg_tinymq_description = TinyMQ - a diminutive, in-memory message queue +pkg_tinymq_homepage = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_fetch = git +pkg_tinymq_repo = https://github.com/ChicagoBoss/tinymq +pkg_tinymq_commit = master + +PACKAGES += tinymt +pkg_tinymt_name = tinymt +pkg_tinymt_description = TinyMT pseudo random number generator for Erlang. 
+pkg_tinymt_homepage = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_fetch = git +pkg_tinymt_repo = https://github.com/jj1bdx/tinymt-erlang +pkg_tinymt_commit = master + +PACKAGES += tirerl +pkg_tirerl_name = tirerl +pkg_tirerl_description = Erlang interface to Elastic Search +pkg_tirerl_homepage = https://github.com/inaka/tirerl +pkg_tirerl_fetch = git +pkg_tirerl_repo = https://github.com/inaka/tirerl +pkg_tirerl_commit = master + +PACKAGES += traffic_tools +pkg_traffic_tools_name = traffic_tools +pkg_traffic_tools_description = Simple traffic limiting library +pkg_traffic_tools_homepage = https://github.com/systra/traffic_tools +pkg_traffic_tools_fetch = git +pkg_traffic_tools_repo = https://github.com/systra/traffic_tools +pkg_traffic_tools_commit = master + +PACKAGES += trails +pkg_trails_name = trails +pkg_trails_description = A couple of improvements over Cowboy Routes +pkg_trails_homepage = http://inaka.github.io/cowboy-trails/ +pkg_trails_fetch = git +pkg_trails_repo = https://github.com/inaka/cowboy-trails +pkg_trails_commit = master + +PACKAGES += trane +pkg_trane_name = trane +pkg_trane_description = SAX style broken HTML parser in Erlang +pkg_trane_homepage = https://github.com/massemanet/trane +pkg_trane_fetch = git +pkg_trane_repo = https://github.com/massemanet/trane +pkg_trane_commit = master + +PACKAGES += transit +pkg_transit_name = transit +pkg_transit_description = transit format for erlang +pkg_transit_homepage = https://github.com/isaiah/transit-erlang +pkg_transit_fetch = git +pkg_transit_repo = https://github.com/isaiah/transit-erlang +pkg_transit_commit = master + +PACKAGES += trie +pkg_trie_name = trie +pkg_trie_description = Erlang Trie Implementation +pkg_trie_homepage = https://github.com/okeuday/trie +pkg_trie_fetch = git +pkg_trie_repo = https://github.com/okeuday/trie +pkg_trie_commit = master + +PACKAGES += triq +pkg_triq_name = triq +pkg_triq_description = Trifork QuickCheck +pkg_triq_homepage = 
https://github.com/krestenkrab/triq +pkg_triq_fetch = git +pkg_triq_repo = https://github.com/krestenkrab/triq +pkg_triq_commit = master + +PACKAGES += tunctl +pkg_tunctl_name = tunctl +pkg_tunctl_description = Erlang TUN/TAP interface +pkg_tunctl_homepage = https://github.com/msantos/tunctl +pkg_tunctl_fetch = git +pkg_tunctl_repo = https://github.com/msantos/tunctl +pkg_tunctl_commit = master + +PACKAGES += twerl +pkg_twerl_name = twerl +pkg_twerl_description = Erlang client for the Twitter Streaming API +pkg_twerl_homepage = https://github.com/lucaspiller/twerl +pkg_twerl_fetch = git +pkg_twerl_repo = https://github.com/lucaspiller/twerl +pkg_twerl_commit = oauth + +PACKAGES += twitter_erlang +pkg_twitter_erlang_name = twitter_erlang +pkg_twitter_erlang_description = An Erlang twitter client +pkg_twitter_erlang_homepage = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_fetch = git +pkg_twitter_erlang_repo = https://github.com/ngerakines/erlang_twitter +pkg_twitter_erlang_commit = master + +PACKAGES += ucol_nif +pkg_ucol_nif_name = ucol_nif +pkg_ucol_nif_description = ICU based collation Erlang module +pkg_ucol_nif_homepage = https://github.com/refuge/ucol_nif +pkg_ucol_nif_fetch = git +pkg_ucol_nif_repo = https://github.com/refuge/ucol_nif +pkg_ucol_nif_commit = master + +PACKAGES += unicorn +pkg_unicorn_name = unicorn +pkg_unicorn_description = Generic configuration server +pkg_unicorn_homepage = https://github.com/shizzard/unicorn +pkg_unicorn_fetch = git +pkg_unicorn_repo = https://github.com/shizzard/unicorn +pkg_unicorn_commit = 0.3.0 + +PACKAGES += unsplit +pkg_unsplit_name = unsplit +pkg_unsplit_description = Resolves conflicts in Mnesia after network splits +pkg_unsplit_homepage = https://github.com/uwiger/unsplit +pkg_unsplit_fetch = git +pkg_unsplit_repo = https://github.com/uwiger/unsplit +pkg_unsplit_commit = master + +PACKAGES += uuid +pkg_uuid_name = uuid +pkg_uuid_description = Erlang UUID Implementation +pkg_uuid_homepage = 
https://github.com/okeuday/uuid +pkg_uuid_fetch = git +pkg_uuid_repo = https://github.com/okeuday/uuid +pkg_uuid_commit = v1.4.0 + +PACKAGES += ux +pkg_ux_name = ux +pkg_ux_description = Unicode eXtention for Erlang (Strings, Collation) +pkg_ux_homepage = https://github.com/erlang-unicode/ux +pkg_ux_fetch = git +pkg_ux_repo = https://github.com/erlang-unicode/ux +pkg_ux_commit = master + +PACKAGES += vert +pkg_vert_name = vert +pkg_vert_description = Erlang binding to libvirt virtualization API +pkg_vert_homepage = https://github.com/msantos/erlang-libvirt +pkg_vert_fetch = git +pkg_vert_repo = https://github.com/msantos/erlang-libvirt +pkg_vert_commit = master + +PACKAGES += verx +pkg_verx_name = verx +pkg_verx_description = Erlang implementation of the libvirtd remote protocol +pkg_verx_homepage = https://github.com/msantos/verx +pkg_verx_fetch = git +pkg_verx_repo = https://github.com/msantos/verx +pkg_verx_commit = master + +PACKAGES += vmq_acl +pkg_vmq_acl_name = vmq_acl +pkg_vmq_acl_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_acl_homepage = https://verne.mq/ +pkg_vmq_acl_fetch = git +pkg_vmq_acl_repo = https://github.com/erlio/vmq_acl +pkg_vmq_acl_commit = master + +PACKAGES += vmq_bridge +pkg_vmq_bridge_name = vmq_bridge +pkg_vmq_bridge_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_bridge_homepage = https://verne.mq/ +pkg_vmq_bridge_fetch = git +pkg_vmq_bridge_repo = https://github.com/erlio/vmq_bridge +pkg_vmq_bridge_commit = master + +PACKAGES += vmq_graphite +pkg_vmq_graphite_name = vmq_graphite +pkg_vmq_graphite_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_graphite_homepage = https://verne.mq/ +pkg_vmq_graphite_fetch = git +pkg_vmq_graphite_repo = https://github.com/erlio/vmq_graphite +pkg_vmq_graphite_commit = master + +PACKAGES += vmq_passwd +pkg_vmq_passwd_name = vmq_passwd +pkg_vmq_passwd_description = Component of VerneMQ: A distributed MQTT message 
broker +pkg_vmq_passwd_homepage = https://verne.mq/ +pkg_vmq_passwd_fetch = git +pkg_vmq_passwd_repo = https://github.com/erlio/vmq_passwd +pkg_vmq_passwd_commit = master + +PACKAGES += vmq_server +pkg_vmq_server_name = vmq_server +pkg_vmq_server_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_server_homepage = https://verne.mq/ +pkg_vmq_server_fetch = git +pkg_vmq_server_repo = https://github.com/erlio/vmq_server +pkg_vmq_server_commit = master + +PACKAGES += vmq_snmp +pkg_vmq_snmp_name = vmq_snmp +pkg_vmq_snmp_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_snmp_homepage = https://verne.mq/ +pkg_vmq_snmp_fetch = git +pkg_vmq_snmp_repo = https://github.com/erlio/vmq_snmp +pkg_vmq_snmp_commit = master + +PACKAGES += vmq_systree +pkg_vmq_systree_name = vmq_systree +pkg_vmq_systree_description = Component of VerneMQ: A distributed MQTT message broker +pkg_vmq_systree_homepage = https://verne.mq/ +pkg_vmq_systree_fetch = git +pkg_vmq_systree_repo = https://github.com/erlio/vmq_systree +pkg_vmq_systree_commit = master + +PACKAGES += vmstats +pkg_vmstats_name = vmstats +pkg_vmstats_description = tiny Erlang app that works in conjunction with statsderl in order to generate information on the Erlang VM for graphite logs. +pkg_vmstats_homepage = https://github.com/ferd/vmstats +pkg_vmstats_fetch = git +pkg_vmstats_repo = https://github.com/ferd/vmstats +pkg_vmstats_commit = master + +PACKAGES += walrus +pkg_walrus_name = walrus +pkg_walrus_description = Walrus - Mustache-like Templating +pkg_walrus_homepage = https://github.com/devinus/walrus +pkg_walrus_fetch = git +pkg_walrus_repo = https://github.com/devinus/walrus +pkg_walrus_commit = master + +PACKAGES += webmachine +pkg_webmachine_name = webmachine +pkg_webmachine_description = A REST-based system for building web applications. 
+pkg_webmachine_homepage = https://github.com/basho/webmachine +pkg_webmachine_fetch = git +pkg_webmachine_repo = https://github.com/basho/webmachine +pkg_webmachine_commit = master + +PACKAGES += websocket_client +pkg_websocket_client_name = websocket_client +pkg_websocket_client_description = Erlang websocket client (ws and wss supported) +pkg_websocket_client_homepage = https://github.com/jeremyong/websocket_client +pkg_websocket_client_fetch = git +pkg_websocket_client_repo = https://github.com/jeremyong/websocket_client +pkg_websocket_client_commit = master + +PACKAGES += worker_pool +pkg_worker_pool_name = worker_pool +pkg_worker_pool_description = a simple erlang worker pool +pkg_worker_pool_homepage = https://github.com/inaka/worker_pool +pkg_worker_pool_fetch = git +pkg_worker_pool_repo = https://github.com/inaka/worker_pool +pkg_worker_pool_commit = 1.0.3 + +PACKAGES += wrangler +pkg_wrangler_name = wrangler +pkg_wrangler_description = Import of the Wrangler svn repository. +pkg_wrangler_homepage = http://www.cs.kent.ac.uk/projects/wrangler/Home.html +pkg_wrangler_fetch = git +pkg_wrangler_repo = https://github.com/RefactoringTools/wrangler +pkg_wrangler_commit = master + +PACKAGES += wsock +pkg_wsock_name = wsock +pkg_wsock_description = Erlang library to build WebSocket clients and servers +pkg_wsock_homepage = https://github.com/madtrick/wsock +pkg_wsock_fetch = git +pkg_wsock_repo = https://github.com/madtrick/wsock +pkg_wsock_commit = master + +PACKAGES += xhttpc +pkg_xhttpc_name = xhttpc +pkg_xhttpc_description = Extensible HTTP Client for Erlang +pkg_xhttpc_homepage = https://github.com/seriyps/xhttpc +pkg_xhttpc_fetch = git +pkg_xhttpc_repo = https://github.com/seriyps/xhttpc +pkg_xhttpc_commit = master + +PACKAGES += xref_runner +pkg_xref_runner_name = xref_runner +pkg_xref_runner_description = Erlang Xref Runner (inspired in rebar xref) +pkg_xref_runner_homepage = https://github.com/inaka/xref_runner +pkg_xref_runner_fetch = git 
+pkg_xref_runner_repo = https://github.com/inaka/xref_runner +pkg_xref_runner_commit = 0.2.0 + +PACKAGES += yamerl +pkg_yamerl_name = yamerl +pkg_yamerl_description = YAML 1.2 parser in pure Erlang +pkg_yamerl_homepage = https://github.com/yakaz/yamerl +pkg_yamerl_fetch = git +pkg_yamerl_repo = https://github.com/yakaz/yamerl +pkg_yamerl_commit = master + +PACKAGES += yamler +pkg_yamler_name = yamler +pkg_yamler_description = libyaml-based yaml loader for Erlang +pkg_yamler_homepage = https://github.com/goertzenator/yamler +pkg_yamler_fetch = git +pkg_yamler_repo = https://github.com/goertzenator/yamler +pkg_yamler_commit = master + +PACKAGES += yaws +pkg_yaws_name = yaws +pkg_yaws_description = Yaws webserver +pkg_yaws_homepage = http://yaws.hyber.org +pkg_yaws_fetch = git +pkg_yaws_repo = https://github.com/klacke/yaws +pkg_yaws_commit = master + +PACKAGES += zab_engine +pkg_zab_engine_name = zab_engine +pkg_zab_engine_description = zab propotocol implement by erlang +pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_fetch = git +pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine +pkg_zab_engine_commit = master + +PACKAGES += zeta +pkg_zeta_name = zeta +pkg_zeta_description = HTTP access log parser in Erlang +pkg_zeta_homepage = https://github.com/s1n4/zeta +pkg_zeta_fetch = git +pkg_zeta_repo = https://github.com/s1n4/zeta +pkg_zeta_commit = + +PACKAGES += zippers +pkg_zippers_name = zippers +pkg_zippers_description = A library for functional zipper data structures in Erlang. Read more on zippers +pkg_zippers_homepage = https://github.com/ferd/zippers +pkg_zippers_fetch = git +pkg_zippers_repo = https://github.com/ferd/zippers +pkg_zippers_commit = master + +PACKAGES += zlists +pkg_zlists_name = zlists +pkg_zlists_description = Erlang lazy lists library. 
+pkg_zlists_homepage = https://github.com/vjache/erlang-zlists +pkg_zlists_fetch = git +pkg_zlists_repo = https://github.com/vjache/erlang-zlists +pkg_zlists_commit = master + +PACKAGES += zraft_lib +pkg_zraft_lib_name = zraft_lib +pkg_zraft_lib_description = Erlang raft consensus protocol implementation +pkg_zraft_lib_homepage = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_fetch = git +pkg_zraft_lib_repo = https://github.com/dreyk/zraft_lib +pkg_zraft_lib_commit = master + +PACKAGES += zucchini +pkg_zucchini_name = zucchini +pkg_zucchini_description = An Erlang INI parser +pkg_zucchini_homepage = https://github.com/devinus/zucchini +pkg_zucchini_fetch = git +pkg_zucchini_repo = https://github.com/devinus/zucchini +pkg_zucchini_commit = master + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: search + +define pkg_print + $(verbose) printf "%s\n" \ + $(if $(call core_eq,$(1),$(pkg_$(1)_name)),,"Pkg name: $(1)") \ + "App name: $(pkg_$(1)_name)" \ + "Description: $(pkg_$(1)_description)" \ + "Home page: $(pkg_$(1)_homepage)" \ + "Fetch with: $(pkg_$(1)_fetch)" \ + "Repository: $(pkg_$(1)_repo)" \ + "Commit: $(pkg_$(1)_commit)" \ + "" + +endef + +search: +ifdef q + $(foreach p,$(PACKAGES), \ + $(if $(findstring $(call core_lc,$(q)),$(call core_lc,$(pkg_$(p)_name) $(pkg_$(p)_description))), \ + $(call pkg_print,$(p)))) +else + $(foreach p,$(PACKAGES),$(call pkg_print,$(p))) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-deps + +# Configuration. + +ifdef OTP_DEPS +$(warning The variable OTP_DEPS is deprecated in favor of LOCAL_DEPS.) 
+endif + +IGNORE_DEPS ?= +export IGNORE_DEPS + +APPS_DIR ?= $(CURDIR)/apps +export APPS_DIR + +DEPS_DIR ?= $(CURDIR)/deps +export DEPS_DIR + +REBAR_DEPS_DIR = $(DEPS_DIR) +export REBAR_DEPS_DIR + +dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) +dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ + $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) +dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) + +ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) +ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) + +ifeq ($(filter $(APPS_DIR) $(DEPS_DIR),$(subst :, ,$(ERL_LIBS))),) +ifeq ($(ERL_LIBS),) + ERL_LIBS = $(APPS_DIR):$(DEPS_DIR) +else + ERL_LIBS := $(ERL_LIBS):$(APPS_DIR):$(DEPS_DIR) +endif +endif +export ERL_LIBS + +export NO_AUTOPATCH + +# Verbosity. + +dep_verbose_0 = @echo " DEP " $(1); +dep_verbose_2 = set -x; +dep_verbose = $(dep_verbose_$(V)) + +# Core targets. + +ifneq ($(SKIP_DEPS),) +deps:: +else +deps:: $(ALL_DEPS_DIRS) +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ + if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ + :; \ + else \ + echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ + if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ + $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ + else \ + echo "Error: No Makefile to build dependency $$dep."; \ + exit 2; \ + fi \ + fi \ + done +endif + +# Deps related targets. 
+ +# @todo rename GNUmakefile and makefile into Makefile first, if they exist +# While Makefile file could be GNUmakefile or makefile, +# in practice only Makefile is needed so far. +define dep_autopatch + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ + $(call dep_autopatch2,$(1)); \ + elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ + $(call dep_autopatch2,$(1)); \ + else \ + if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + $(call dep_autopatch_erlang_mk,$(1)); \ + else \ + $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ + fi \ + fi \ + else \ + if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ + $(call dep_autopatch_noop,$(1)); \ + else \ + $(call dep_autopatch2,$(1)); \ + fi \ + fi +endef + +define dep_autopatch2 + $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ + if [ -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ + $(call dep_autopatch_fetch_rebar); \ + $(call dep_autopatch_rebar,$(1)); \ + else \ + $(call dep_autopatch_gen,$(1)); \ + fi +endef + +define dep_autopatch_noop + printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile +endef + +# Overwrite erlang.mk with the current file by default. +ifeq ($(NO_AUTOPATCH_ERLANG_MK),) +define dep_autopatch_erlang_mk + echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ + > $(DEPS_DIR)/$1/erlang.mk +endef +else +define dep_autopatch_erlang_mk + : +endef +endif + +define dep_autopatch_gen + printf "%s\n" \ + "ERLC_OPTS = +debug_info" \ + "include ../../erlang.mk" > $(DEPS_DIR)/$(1)/Makefile +endef + +define dep_autopatch_fetch_rebar + mkdir -p $(ERLANG_MK_TMP); \ + if [ ! 
-d $(ERLANG_MK_TMP)/rebar ]; then \ + git clone -q -n -- https://github.com/rebar/rebar $(ERLANG_MK_TMP)/rebar; \ + cd $(ERLANG_MK_TMP)/rebar; \ + git checkout -q 791db716b5a3a7671e0b351f95ddf24b848ee173; \ + $(MAKE); \ + cd -; \ + fi +endef + +define dep_autopatch_rebar + if [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ + mv $(DEPS_DIR)/$(1)/Makefile $(DEPS_DIR)/$(1)/Makefile.orig.mk; \ + fi; \ + $(call erlang,$(call dep_autopatch_rebar.erl,$(1))); \ + rm -f $(DEPS_DIR)/$(1)/ebin/$(1).app +endef + +define dep_autopatch_rebar.erl + application:load(rebar), + application:set_env(rebar, log_level, debug), + Conf1 = case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config)") of + {ok, Conf0} -> Conf0; + _ -> [] + end, + {Conf, OsEnv} = fun() -> + case filelib:is_file("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)") of + false -> {Conf1, []}; + true -> + Bindings0 = erl_eval:new_bindings(), + Bindings1 = erl_eval:add_binding('CONFIG', Conf1, Bindings0), + Bindings = erl_eval:add_binding('SCRIPT', "$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings1), + Before = os:getenv(), + {ok, Conf2} = file:script("$(call core_native_path,$(DEPS_DIR)/$1/rebar.config.script)", Bindings), + {Conf2, lists:foldl(fun(E, Acc) -> lists:delete(E, Acc) end, os:getenv(), Before)} + end + end(), + Write = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/Makefile)", Text, [append]) + end, + Escape = fun (Text) -> + re:replace(Text, "\\\\$$", "\$$$$", [global, {return, list}]) + end, + Write("IGNORE_DEPS += edown eper eunit_formatters meck node_package " + "rebar_lock_deps_plugin rebar_vsn_plugin reltool_util\n"), + Write("C_SRC_DIR = /path/do/not/exist\n"), + Write("C_SRC_TYPE = rebar\n"), + Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), + Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), + fun() -> + Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), + case lists:keyfind(erl_opts, 1, Conf) of + 
false -> ok; + {_, ErlOpts} -> + lists:foreach(fun + ({d, D}) -> + Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + ({i, I}) -> + Write(["ERLC_OPTS += -I ", I, "\n"]); + ({platform_define, Regex, D}) -> + case rebar_utils:is_arch(Regex) of + true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); + false -> ok + end; + ({parse_transform, PT}) -> + Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); + (_) -> ok + end, ErlOpts) + end, + Write("\n") + end(), + fun() -> + File = case lists:keyfind(deps, 1, Conf) of + false -> []; + {_, Deps} -> + [begin case case Dep of + {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; + {N, S} when is_tuple(S) -> {N, S}; + {N, _, S} -> {N, S}; + {N, _, S, _} -> {N, S}; + _ -> false + end of + false -> ok; + {Name, Source} -> + {Method, Repo, Commit} = case Source of + {hex, V} -> {hex, V, undefined}; + {git, R} -> {git, R, master}; + {M, R, {branch, C}} -> {M, R, C}; + {M, R, {ref, C}} -> {M, R, C}; + {M, R, {tag, C}} -> {M, R, C}; + {M, R, C} -> {M, R, C} + end, + Write(io_lib:format("DEPS += ~s\ndep_~s = ~s ~s ~s~n", [Name, Name, Method, Repo, Commit])) + end end || Dep <- Deps] + end + end(), + fun() -> + case lists:keyfind(erl_first_files, 1, Conf) of + false -> ok; + {_, Files} -> + Names = [[" ", case lists:reverse(F) of + "lre." 
++ Elif -> lists:reverse(Elif); + Elif -> lists:reverse(Elif) + end] || "src/" ++ F <- Files], + Write(io_lib:format("COMPILE_FIRST +=~s\n", [Names])) + end + end(), + FindFirst = fun(F, Fd) -> + case io:parse_erl_form(Fd, undefined) of + {ok, {attribute, _, compile, {parse_transform, PT}}, _} -> + [PT, F(F, Fd)]; + {ok, {attribute, _, compile, CompileOpts}, _} when is_list(CompileOpts) -> + case proplists:get_value(parse_transform, CompileOpts) of + undefined -> [F(F, Fd)]; + PT -> [PT, F(F, Fd)] + end; + {ok, {attribute, _, include, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end + end; + {ok, {attribute, _, include_lib, "$(1)/include/" ++ Hrl}, _} -> + {ok, HrlFd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]), + [F(F, HrlFd), F(F, Fd)]; + {ok, {attribute, _, include_lib, Hrl}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/include/)" ++ Hrl, [read]) of + {ok, HrlFd} -> [F(F, HrlFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {ok, {attribute, _, import, {Imp, _}}, _} -> + case file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(Imp) ++ ".erl", [read]) of + {ok, ImpFd} -> [Imp, F(F, ImpFd), F(F, Fd)]; + _ -> [F(F, Fd)] + end; + {eof, _} -> + file:close(Fd), + []; + _ -> + F(F, Fd) + end + end, + fun() -> + ErlFiles = filelib:wildcard("$(call core_native_path,$(DEPS_DIR)/$1/src/)*.erl"), + First0 = lists:usort(lists:flatten([begin + {ok, Fd} = file:open(F, [read]), + FindFirst(FindFirst, Fd) + end || F <- ErlFiles])), + First = lists:flatten([begin + {ok, Fd} = file:open("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", [read]), + FindFirst(FindFirst, Fd) + end || M <- First0, lists:member("$(call 
core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)]) ++ First0, + Write(["COMPILE_FIRST +=", [[" ", atom_to_list(M)] || M <- First, + lists:member("$(call core_native_path,$(DEPS_DIR)/$1/src/)" ++ atom_to_list(M) ++ ".erl", ErlFiles)], "\n"]) + end(), + Write("\n\nrebar_dep: preprocess pre-deps deps pre-app app\n"), + Write("\npreprocess::\n"), + Write("\npre-deps::\n"), + Write("\npre-app::\n"), + PatchHook = fun(Cmd) -> + case Cmd of + "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); + "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + "gmake " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); + _ -> Escape(Cmd) + end + end, + fun() -> + case lists:keyfind(pre_hooks, 1, Conf) of + false -> ok; + {_, Hooks} -> + [case H of + {'get-deps', Cmd} -> + Write("\npre-deps::\n\t" ++ PatchHook(Cmd) ++ "\n"); + {compile, Cmd} -> + Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + {Regex, compile, Cmd} -> + case rebar_utils:is_arch(Regex) of + true -> Write("\npre-app::\n\tCC=$$\(CC) " ++ PatchHook(Cmd) ++ "\n"); + false -> ok + end; + _ -> ok + end || H <- Hooks] + end + end(), + ShellToMk = fun(V) -> + re:replace(re:replace(V, "(\\\\$$)(\\\\w*)", "\\\\1(\\\\2)", [global]), + "-Werror\\\\b", "", [{return, list}, global]) + end, + PortSpecs = fun() -> + case lists:keyfind(port_specs, 1, Conf) of + false -> + case filelib:is_dir("$(call core_native_path,$(DEPS_DIR)/$1/c_src)") of + false -> []; + true -> + [{"priv/" ++ proplists:get_value(so_name, Conf, "$(1)_drv.so"), + proplists:get_value(port_sources, Conf, ["c_src/*.c"]), []}] + end; + {_, Specs} -> + lists:flatten([case S of + {Output, Input} -> {ShellToMk(Output), Input, []}; + {Regex, Output, Input} -> + case rebar_utils:is_arch(Regex) of + true -> {ShellToMk(Output), Input, []}; + false -> [] + end; + {Regex, Output, Input, [{env, Env}]} -> + case rebar_utils:is_arch(Regex) of + true -> 
{ShellToMk(Output), Input, Env}; + false -> [] + end + end || S <- Specs]) + end + end(), + PortSpecWrite = fun (Text) -> + file:write_file("$(call core_native_path,$(DEPS_DIR)/$1/c_src/Makefile.erlang.mk)", Text, [append]) + end, + case PortSpecs of + [] -> ok; + _ -> + Write("\npre-app::\n\t$$\(MAKE) -f c_src/Makefile.erlang.mk\n"), + PortSpecWrite(io_lib:format("ERL_CFLAGS = -finline-functions -Wall -fPIC -I ~s/erts-~s/include -I ~s\n", + [code:root_dir(), erlang:system_info(version), code:lib_dir(erl_interface, include)])), + PortSpecWrite(io_lib:format("ERL_LDFLAGS = -L ~s -lerl_interface -lei\n", + [code:lib_dir(erl_interface, lib)])), + [PortSpecWrite(["\n", E, "\n"]) || E <- OsEnv], + FilterEnv = fun(Env) -> + lists:flatten([case E of + {_, _} -> E; + {Regex, K, V} -> + case rebar_utils:is_arch(Regex) of + true -> {K, V}; + false -> [] + end + end || E <- Env]) + end, + MergeEnv = fun(Env) -> + lists:foldl(fun ({K, V}, Acc) -> + case lists:keyfind(K, 1, Acc) of + false -> [{K, rebar_utils:expand_env_variable(V, K, "")}|Acc]; + {_, V0} -> [{K, rebar_utils:expand_env_variable(V, K, V0)}|Acc] + end + end, [], Env) + end, + PortEnv = case lists:keyfind(port_env, 1, Conf) of + false -> []; + {_, PortEnv0} -> FilterEnv(PortEnv0) + end, + PortSpec = fun ({Output, Input0, Env}) -> + filelib:ensure_dir("$(call core_native_path,$(DEPS_DIR)/$1/)" ++ Output), + Input = [[" ", I] || I <- Input0], + PortSpecWrite([ + [["\n", K, " = ", ShellToMk(V)] || {K, V} <- lists:reverse(MergeEnv(PortEnv))], + case $(PLATFORM) of + darwin -> "\n\nLDFLAGS += -flat_namespace -undefined suppress"; + _ -> "" + end, + "\n\nall:: ", Output, "\n\n", + "%.o: %.c\n\t$$\(CC) -c -o $$\@ $$\< $$\(CFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.C\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cc\n\t$$\(CXX) -c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + "%.o: %.cpp\n\t$$\(CXX) 
-c -o $$\@ $$\< $$\(CXXFLAGS) $$\(ERL_CFLAGS) $$\(DRV_CFLAGS) $$\(EXE_CFLAGS)\n\n", + [[Output, ": ", K, " = ", ShellToMk(V), "\n"] || {K, V} <- lists:reverse(MergeEnv(FilterEnv(Env)))], + Output, ": $$\(foreach ext,.c .C .cc .cpp,", + "$$\(patsubst %$$\(ext),%.o,$$\(filter %$$\(ext),$$\(wildcard", Input, "))))\n", + "\t$$\(CC) -o $$\@ $$\? $$\(LDFLAGS) $$\(ERL_LDFLAGS) $$\(DRV_LDFLAGS) $$\(EXE_LDFLAGS)", + case filename:extension(Output) of + [] -> "\n"; + _ -> " -shared\n" + end]) + end, + [PortSpec(S) || S <- PortSpecs] + end, + Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), + RunPlugin = fun(Plugin, Step) -> + case erlang:function_exported(Plugin, Step, 2) of + false -> ok; + true -> + c:cd("$(call core_native_path,$(DEPS_DIR)/$1/)"), + Ret = Plugin:Step({config, "", Conf, dict:new(), dict:new(), dict:new(), + dict:store(base_dir, "", dict:new())}, undefined), + io:format("rebar plugin ~p step ~p ret ~p~n", [Plugin, Step, Ret]) + end + end, + fun() -> + case lists:keyfind(plugins, 1, Conf) of + false -> ok; + {_, Plugins} -> + [begin + case lists:keyfind(deps, 1, Conf) of + false -> ok; + {_, Deps} -> + case lists:keyfind(P, 1, Deps) of + false -> ok; + _ -> + Path = "$(call core_native_path,$(DEPS_DIR)/)" ++ atom_to_list(P), + io:format("~s", [os:cmd("$(MAKE) -C $(call core_native_path,$(DEPS_DIR)/$1) " ++ Path)]), + io:format("~s", [os:cmd("$(MAKE) -C " ++ Path ++ " IS_DEP=1")]), + code:add_patha(Path ++ "/ebin") + end + end + end || P <- Plugins], + [case code:load_file(P) of + {module, P} -> ok; + _ -> + case lists:keyfind(plugin_dir, 1, Conf) of + false -> ok; + {_, PluginsDir} -> + ErlFile = "$(call core_native_path,$(DEPS_DIR)/$1/)" ++ PluginsDir ++ "/" ++ atom_to_list(P) ++ ".erl", + {ok, P, Bin} = compile:file(ErlFile, [binary]), + {module, P} = code:load_binary(P, ErlFile, Bin) + end + end || P <- Plugins], + [RunPlugin(P, preprocess) || P <- Plugins], + [RunPlugin(P, pre_compile) || P <- Plugins], + 
[RunPlugin(P, compile) || P <- Plugins] + end + end(), + halt() +endef + +define dep_autopatch_app.erl + UpdateModules = fun(App) -> + case filelib:is_regular(App) of + false -> ok; + true -> + {ok, [{application, '$(1)', L0}]} = file:consult(App), + Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, + fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), + L = lists:keystore(modules, 1, L0, {modules, Mods}), + ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) + end + end, + UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), + halt() +endef + +define dep_autopatch_appsrc.erl + AppSrcOut = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", + AppSrcIn = case filelib:is_regular(AppSrcOut) of false -> "$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"; true -> AppSrcOut end, + case filelib:is_regular(AppSrcIn) of + false -> ok; + true -> + {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), + L1 = lists:keystore(modules, 1, L0, {modules, []}), + L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, + L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, + ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), + case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end + end, + halt() +endef + +define dep_fetch_git + git clone -q -n -- $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && git checkout -q $(call dep_commit,$(1)); +endef + +define dep_fetch_git-submodule + git submodule update --init -- $(DEPS_DIR)/$1; +endef + +define dep_fetch_hg + hg clone -q -U $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); \ + cd $(DEPS_DIR)/$(call dep_name,$(1)) && hg update -q $(call dep_commit,$(1)); +endef + +define dep_fetch_svn + svn checkout -q $(call 
dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_cp + cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); +endef + +define dep_fetch_hex.erl + ssl:start(), + inets:start(), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, + {"https://s3.amazonaws.com/s3.hex.pm/tarballs/$(1)-$(2).tar", []}, + [], [{body_format, binary}]), + {ok, Files} = erl_tar:extract({binary, Body}, [memory]), + {_, Source} = lists:keyfind("contents.tar.gz", 1, Files), + ok = erl_tar:extract({binary, Source}, [{cwd, "$(call core_native_path,$(DEPS_DIR)/$1)"}, compressed]), + halt() +endef + +# Hex only has a package version. No need to look in the Erlang.mk packages. +define dep_fetch_hex + $(call erlang,$(call dep_fetch_hex.erl,$(1),$(strip $(word 2,$(dep_$(1)))))); +endef + +define dep_fetch_fail + echo "Error: Unknown or invalid dependency: $(1)." >&2; \ + exit 78; +endef + +# Kept for compatibility purposes with older Erlang.mk configuration. +define dep_fetch_legacy + $(warning WARNING: '$(1)' dependency configuration uses deprecated format.) 
\ + git clone -q -n -- $(word 1,$(dep_$(1))) $(DEPS_DIR)/$(1); \ + cd $(DEPS_DIR)/$(1) && git checkout -q $(if $(word 2,$(dep_$(1))),$(word 2,$(dep_$(1))),master); +endef + +define dep_fetch + $(if $(dep_$(1)), \ + $(if $(dep_fetch_$(word 1,$(dep_$(1)))), \ + $(word 1,$(dep_$(1))), \ + $(if $(IS_DEP),legacy,fail)), \ + $(if $(filter $(1),$(PACKAGES)), \ + $(pkg_$(1)_fetch), \ + fail)) +endef + +define dep_target +$(DEPS_DIR)/$(call dep_name,$1): + $(eval DEP_NAME := $(call dep_name,$1)) + $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) + $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ + echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ + exit 17; \ + fi + $(verbose) mkdir -p $(DEPS_DIR) + $(dep_verbose) $(call dep_fetch_$(strip $(call dep_fetch,$1)),$1) + $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure.ac -o -f $(DEPS_DIR)/$(DEP_NAME)/configure.in ]; then \ + echo " AUTO " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && autoreconf -Wall -vif -I m4; \ + fi + - $(verbose) if [ -f $(DEPS_DIR)/$(DEP_NAME)/configure ]; then \ + echo " CONF " $(DEP_STR); \ + cd $(DEPS_DIR)/$(DEP_NAME) && ./configure; \ + fi +ifeq ($(filter $(1),$(NO_AUTOPATCH)),) + $(verbose) if [ "$(1)" = "amqp_client" -a "$(RABBITMQ_CLIENT_PATCH)" ]; then \ + if [ ! -d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi; \ + if [ ! -d $(DEPS_DIR)/rabbitmq-server ]; then \ + echo " PATCH Downloading rabbitmq-server"; \ + git clone https://github.com/rabbitmq/rabbitmq-server.git $(DEPS_DIR)/rabbitmq-server; \ + fi; \ + ln -s $(DEPS_DIR)/amqp_client/deps/rabbit_common-0.0.0 $(DEPS_DIR)/rabbit_common; \ + elif [ "$(1)" = "rabbit" -a "$(RABBITMQ_SERVER_PATCH)" ]; then \ + if [ ! 
-d $(DEPS_DIR)/rabbitmq-codegen ]; then \ + echo " PATCH Downloading rabbitmq-codegen"; \ + git clone https://github.com/rabbitmq/rabbitmq-codegen.git $(DEPS_DIR)/rabbitmq-codegen; \ + fi \ + else \ + $$(call dep_autopatch,$(DEP_NAME)) \ + fi +endif +endef + +$(foreach dep,$(BUILD_DEPS) $(DEPS),$(eval $(call dep_target,$(dep)))) + +ifndef IS_APP +clean:: clean-apps + +clean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ + done + +distclean:: distclean-apps + +distclean-apps: + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ + done +endif + +ifndef SKIP_DEPS +distclean:: distclean-deps + +distclean-deps: + $(gen_verbose) rm -rf $(DEPS_DIR) +endif + +# Forward-declare variables used in core/deps-tools.mk. This is required +# in case plugins use them. + +ERLANG_MK_RECURSIVE_DEPS_LIST = $(ERLANG_MK_TMP)/list-deps.log +ERLANG_MK_RECURSIVE_DOC_DEPS_LIST = $(ERLANG_MK_TMP)/list-doc-deps.log +ERLANG_MK_RECURSIVE_REL_DEPS_LIST = $(ERLANG_MK_TMP)/list-rel-deps.log +ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/list-test-deps.log +ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/list-shell-deps.log + +# External plugins. + +DEP_PLUGINS ?= + +define core_dep_plugin +-include $(DEPS_DIR)/$(1) + +$(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; +endef + +$(foreach p,$(DEP_PLUGINS),\ + $(eval $(if $(findstring /,$p),\ + $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ + $(call core_dep_plugin,$p/plugins.mk,$p)))) + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Configuration. + +DTL_FULL_PATH ?= +DTL_PATH ?= templates/ +DTL_SUFFIX ?= _dtl + +# Verbosity. + +dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); +dtl_verbose = $(dtl_verbose_$(V)) + +# Core targets. 
+ +define erlydtl_compile.erl + [begin + Module0 = case "$(strip $(DTL_FULL_PATH))" of + "" -> + filename:basename(F, ".dtl"); + _ -> + "$(DTL_PATH)" ++ F2 = filename:rootname(F, ".dtl"), + re:replace(F2, "/", "_", [{return, list}, global]) + end, + Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), + case erlydtl:compile(F, Module, [{out_dir, "ebin/"}, return_errors, {doc_root, "templates"}]) of + ok -> ok; + {ok, _} -> ok + end + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) + +DTL_FILES = $(sort $(call core_find,$(DTL_PATH),*.dtl)) + +ifdef DTL_FULL_PATH +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(subst /,_,$(DTL_FILES:$(DTL_PATH)%=%)))) +else +BEAM_FILES += $(addprefix ebin/,$(patsubst %.dtl,%_dtl.beam,$(notdir $(DTL_FILES)))) +endif + +ifneq ($(words $(DTL_FILES)),0) +# Rebuild everything when the Makefile changes. +$(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(DTL_FILES); \ + fi + @touch $@ + +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl +endif + +ebin/$(PROJECT).app:: $(DTL_FILES) + $(if $(strip $?),\ + $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$?,-pa ebin/ $(DEPS_DIR)/erlydtl/ebin/))) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Verbosity. + +proto_verbose_0 = @echo " PROTO " $(filter %.proto,$(?F)); +proto_verbose = $(proto_verbose_$(V)) + +# Core targets. 
+ +define compile_proto + $(verbose) mkdir -p ebin/ include/ + $(proto_verbose) $(call erlang,$(call compile_proto.erl,$(1))) + $(proto_verbose) erlc +debug_info -o ebin/ ebin/*.erl + $(verbose) rm ebin/*.erl +endef + +define compile_proto.erl + [begin + Dir = filename:dirname(filename:dirname(F)), + protobuffs_compile:generate_source(F, + [{output_include_dir, Dir ++ "/include"}, + {output_src_dir, Dir ++ "/ebin"}]) + end || F <- string:tokens("$(1)", " ")], + halt(). +endef + +ifneq ($(wildcard src/),) +ebin/$(PROJECT).app:: $(sort $(call core_find,src/,*.proto)) + $(if $(strip $?),$(call compile_proto,$?)) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-app + +# Configuration. + +ERLC_OPTS ?= -Werror +debug_info +warn_export_vars +warn_shadow_vars \ + +warn_obsolete_guard # +bin_opt_info +warn_export_all +warn_missing_spec +COMPILE_FIRST ?= +COMPILE_FIRST_PATHS = $(addprefix src/,$(addsuffix .erl,$(COMPILE_FIRST))) +ERLC_EXCLUDE ?= +ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) + +ERLC_MIB_OPTS ?= +COMPILE_MIB_FIRST ?= +COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) + +# Verbosity. 
+ +app_verbose_0 = @echo " APP " $(PROJECT); +app_verbose_2 = set -x; +app_verbose = $(app_verbose_$(V)) + +appsrc_verbose_0 = @echo " APP " $(PROJECT).app.src; +appsrc_verbose_2 = set -x; +appsrc_verbose = $(appsrc_verbose_$(V)) + +makedep_verbose_0 = @echo " DEPEND" $(PROJECT).d; +makedep_verbose_2 = set -x; +makedep_verbose = $(makedep_verbose_$(V)) + +erlc_verbose_0 = @echo " ERLC " $(filter-out $(patsubst %,%.erl,$(ERLC_EXCLUDE)),\ + $(filter %.erl %.core,$(?F))); +erlc_verbose_2 = set -x; +erlc_verbose = $(erlc_verbose_$(V)) + +xyrl_verbose_0 = @echo " XYRL " $(filter %.xrl %.yrl,$(?F)); +xyrl_verbose_2 = set -x; +xyrl_verbose = $(xyrl_verbose_$(V)) + +asn1_verbose_0 = @echo " ASN1 " $(filter %.asn1,$(?F)); +asn1_verbose_2 = set -x; +asn1_verbose = $(asn1_verbose_$(V)) + +mib_verbose_0 = @echo " MIB " $(filter %.bin %.mib,$(?F)); +mib_verbose_2 = set -x; +mib_verbose = $(mib_verbose_$(V)) + +ifneq ($(wildcard src/),) + +# Targets. + +ifeq ($(wildcard ebin/test),) +app:: deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +else +app:: clean deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build +endif + +ifeq ($(wildcard src/$(PROJECT)_app.erl),) +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, []}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]} +]}. +endef +else +define app_file +{application, $(PROJECT), [ + {description, "$(PROJECT_DESCRIPTION)"}, + {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), + {id$(comma)$(space)"$(1)"}$(comma)) + {modules, [$(call comma_list,$(2))]}, + {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, + {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, + {mod, {$(PROJECT)_app, []}} +]}. 
+endef +endif + +app-build: ebin/$(PROJECT).app + $(verbose) : + +# Source files. + +ERL_FILES = $(sort $(call core_find,src/,*.erl)) +CORE_FILES = $(sort $(call core_find,src/,*.core)) + +# ASN.1 files. + +ifneq ($(wildcard asn1/),) +ASN1_FILES = $(sort $(call core_find,asn1/,*.asn1)) +ERL_FILES += $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +define compile_asn1 + $(verbose) mkdir -p include/ + $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) + $(verbose) mv asn1/*.erl src/ + $(verbose) mv asn1/*.hrl include/ + $(verbose) mv asn1/*.asn1db include/ +endef + +$(PROJECT).d:: $(ASN1_FILES) + $(if $(strip $?),$(call compile_asn1,$?)) +endif + +# SNMP MIB files. + +ifneq ($(wildcard mibs/),) +MIB_FILES = $(sort $(call core_find,mibs/,*.mib)) + +$(PROJECT).d:: $(COMPILE_MIB_FIRST_PATHS) $(MIB_FILES) + $(verbose) mkdir -p include/ priv/mibs/ + $(mib_verbose) erlc -v $(ERLC_MIB_OPTS) -o priv/mibs/ -I priv/mibs/ $? + $(mib_verbose) erlc -o include/ -- $(addprefix priv/mibs/,$(patsubst %.mib,%.bin,$(notdir $?))) +endif + +# Leex and Yecc files. + +XRL_FILES = $(sort $(call core_find,src/,*.xrl)) +XRL_ERL_FILES = $(addprefix src/,$(patsubst %.xrl,%.erl,$(notdir $(XRL_FILES)))) +ERL_FILES += $(XRL_ERL_FILES) + +YRL_FILES = $(sort $(call core_find,src/,*.yrl)) +YRL_ERL_FILES = $(addprefix src/,$(patsubst %.yrl,%.erl,$(notdir $(YRL_FILES)))) +ERL_FILES += $(YRL_ERL_FILES) + +$(PROJECT).d:: $(XRL_FILES) $(YRL_FILES) + $(if $(strip $?),$(xyrl_verbose) erlc -v -o src/ $?) + +# Erlang and Core Erlang files. 
+ +define makedep.erl + ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), + Modules = [{filename:basename(F, ".erl"), F} || F <- ErlFiles], + Add = fun (Dep, Acc) -> + case lists:keyfind(atom_to_list(Dep), 1, Modules) of + {_, DepFile} -> [DepFile|Acc]; + false -> Acc + end + end, + AddHd = fun (Dep, Acc) -> + case {Dep, lists:keymember(Dep, 2, Modules)} of + {"src/" ++ _, false} -> [Dep|Acc]; + {"include/" ++ _, false} -> [Dep|Acc]; + _ -> Acc + end + end, + CompileFirst = fun (Deps) -> + First0 = [case filename:extension(D) of + ".erl" -> filename:basename(D, ".erl"); + _ -> [] + end || D <- Deps], + case lists:usort(First0) of + [] -> []; + [[]] -> []; + First -> ["COMPILE_FIRST +=", [[" ", F] || F <- First], "\n"] + end + end, + Depend = [begin + case epp:parse_file(F, ["include/"], []) of + {ok, Forms} -> + Deps = lists:usort(lists:foldl(fun + ({attribute, _, behavior, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, behaviour, Dep}, Acc) -> Add(Dep, Acc); + ({attribute, _, compile, {parse_transform, Dep}}, Acc) -> Add(Dep, Acc); + ({attribute, _, file, {Dep, _}}, Acc) -> AddHd(Dep, Acc); + (_, Acc) -> Acc + end, [], Forms)), + case Deps of + [] -> ""; + _ -> [F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n", CompileFirst(Deps)] + end; + {error, enoent} -> + [] + end + end || F <- ErlFiles], + ok = file:write_file("$(1)", Depend), + halt() +endef + +ifeq ($(if $(NO_MAKEDEP),$(wildcard $(PROJECT).d),),) +$(PROJECT).d:: $(ERL_FILES) $(call core_find,include/,*.hrl) $(MAKEFILE_LIST) + $(makedep_verbose) $(call erlang,$(call makedep.erl,$@)) +endif + +ifneq ($(words $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES)),0) +# Rebuild everything when the Makefile changes. 
+$(ERLANG_MK_TMP)/last-makefile-change: $(MAKEFILE_LIST) + @mkdir -p $(ERLANG_MK_TMP) + @if test -f $@; then \ + touch $(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES); \ + touch -c $(PROJECT).d; \ + fi + @touch $@ + +$(ERL_FILES) $(CORE_FILES) $(ASN1_FILES) $(MIB_FILES) $(XRL_FILES) $(YRL_FILES):: $(ERLANG_MK_TMP)/last-makefile-change +ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change +endif + +-include $(PROJECT).d + +ebin/$(PROJECT).app:: ebin/ + +ebin/: + $(verbose) mkdir -p ebin/ + +define compile_erl + $(erlc_verbose) erlc -v $(if $(IS_DEP),$(filter-out -Werror,$(ERLC_OPTS)),$(ERLC_OPTS)) -o ebin/ \ + -pa ebin/ -I include/ $(filter-out $(ERLC_EXCLUDE_PATHS),$(COMPILE_FIRST_PATHS) $(1)) +endef + +ebin/$(PROJECT).app:: $(ERL_FILES) $(CORE_FILES) $(wildcard src/$(PROJECT).app.src) + $(eval FILES_TO_COMPILE := $(filter-out src/$(PROJECT).app.src,$?)) + $(if $(strip $(FILES_TO_COMPILE)),$(call compile_erl,$(FILES_TO_COMPILE))) + $(eval GITDESCRIBE := $(shell git describe --dirty --abbrev=7 --tags --always --first-parent 2>/dev/null || true)) + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ + $(filter-out $(ERLC_EXCLUDE_PATHS),$(ERL_FILES) $(CORE_FILES) $(BEAM_FILES))))))) +ifeq ($(wildcard src/$(PROJECT).app.src),) + $(app_verbose) printf "$(subst $(newline),\n,$(subst ",\",$(call app_file,$(GITDESCRIBE),$(MODULES))))" \ + > ebin/$(PROJECT).app +else + $(verbose) if [ -z "$$(grep -E '^[^%]*{\s*modules\s*,' src/$(PROJECT).app.src)" ]; then \ + echo "Empty modules entry not found in $(PROJECT).app.src. Please consult the erlang.mk README for instructions." 
>&2; \ + exit 1; \ + fi + $(appsrc_verbose) cat src/$(PROJECT).app.src \ + | sed "s/{[[:space:]]*modules[[:space:]]*,[[:space:]]*\[\]}/{modules, \[$(call comma_list,$(MODULES))\]}/" \ + | sed "s/{id,[[:space:]]*\"git\"}/{id, \"$(GITDESCRIBE)\"}/" \ + > ebin/$(PROJECT).app +endif + +clean:: clean-app + +clean-app: + $(gen_verbose) rm -rf $(PROJECT).d ebin/ priv/mibs/ $(XRL_ERL_FILES) $(YRL_ERL_FILES) \ + $(addprefix include/,$(patsubst %.mib,%.hrl,$(notdir $(MIB_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.hrl,$(notdir $(ASN1_FILES)))) \ + $(addprefix include/,$(patsubst %.asn1,%.asn1db,$(notdir $(ASN1_FILES)))) \ + $(addprefix src/,$(patsubst %.asn1,%.erl,$(notdir $(ASN1_FILES)))) + +endif + +# Copyright (c) 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: docs-deps + +# Configuration. + +ALL_DOC_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(DOC_DEPS)) + +# Targets. + +$(foreach dep,$(DOC_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +doc-deps: +else +doc-deps: $(ALL_DOC_DEPS_DIRS) + $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rel-deps + +# Configuration. + +ALL_REL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(REL_DEPS)) + +# Targets. + +$(foreach dep,$(REL_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +rel-deps: +else +rel-deps: $(ALL_REL_DEPS_DIRS) + $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: test-deps test-dir test-build clean-test-dir + +# Configuration. 
+ +TEST_DIR ?= $(CURDIR)/test + +ALL_TEST_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(TEST_DEPS)) + +TEST_ERLC_OPTS ?= +debug_info +warn_export_vars +warn_shadow_vars +warn_obsolete_guard +TEST_ERLC_OPTS += -DTEST=1 + +# Targets. + +$(foreach dep,$(TEST_DEPS),$(eval $(call dep_target,$(dep)))) + +ifneq ($(SKIP_DEPS),) +test-deps: +else +test-deps: $(ALL_TEST_DEPS_DIRS) + $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done +endif + +ifneq ($(wildcard $(TEST_DIR)),) +test-dir: + $(gen_verbose) erlc -v $(TEST_ERLC_OPTS) -I include/ -o $(TEST_DIR) \ + $(call core_find,$(TEST_DIR)/,*.erl) -pa ebin/ +endif + +ifeq ($(wildcard ebin/test),) +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: clean deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" + $(gen_verbose) touch ebin/test +else +test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) +test-build:: deps test-deps $(PROJECT).d + $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" +endif + +clean:: clean-test-dir + +clean-test-dir: +ifneq ($(wildcard $(TEST_DIR)/*.beam),) + $(gen_verbose) rm -f $(TEST_DIR)/*.beam +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: rebar.config + +# We strip out -Werror because we don't want to fail due to +# warnings when used as a dependency. + +compat_prepare_erlc_opts = $(shell echo "$1" | sed 's/, */,/') + +define compat_convert_erlc_opts +$(if $(filter-out -Werror,$1),\ + $(if $(findstring +,$1),\ + $(shell echo $1 | cut -b 2-))) +endef + +define compat_rebar_config +{deps, [$(call comma_list,$(foreach d,$(DEPS),\ + {$(call dep_name,$d),".*",{git,"$(call dep_repo,$d)","$(call dep_commit,$d)"}}))]}. +{erl_opts, [$(call comma_list,$(foreach o,$(call compat_prepare_erlc_opts,$(ERLC_OPTS)),\ + $(call compat_convert_erlc_opts,$o)))]}. 
+endef + +$(eval _compat_rebar_config = $$(compat_rebar_config)) +$(eval export _compat_rebar_config) + +rebar.config: + $(gen_verbose) echo "$${_compat_rebar_config}" > rebar.config + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: asciidoc asciidoc-guide asciidoc-manual install-asciidoc distclean-asciidoc + +MAN_INSTALL_PATH ?= /usr/local/share/man +MAN_SECTIONS ?= 3 7 + +docs:: asciidoc + +asciidoc: distclean-asciidoc doc-deps asciidoc-guide asciidoc-manual + +ifeq ($(wildcard doc/src/guide/book.asciidoc),) +asciidoc-guide: +else +asciidoc-guide: + a2x -v -f pdf doc/src/guide/book.asciidoc && mv doc/src/guide/book.pdf doc/guide.pdf + a2x -v -f chunked doc/src/guide/book.asciidoc && mv doc/src/guide/book.chunked/ doc/html/ +endif + +ifeq ($(wildcard doc/src/manual/*.asciidoc),) +asciidoc-manual: +else +asciidoc-manual: + for f in doc/src/manual/*.asciidoc ; do \ + a2x -v -f manpage $$f ; \ + done + for s in $(MAN_SECTIONS); do \ + mkdir -p doc/man$$s/ ; \ + mv doc/src/manual/*.$$s doc/man$$s/ ; \ + gzip doc/man$$s/*.$$s ; \ + done + +install-docs:: install-asciidoc + +install-asciidoc: asciidoc-manual + for s in $(MAN_SECTIONS); do \ + mkdir -p $(MAN_INSTALL_PATH)/man$$s/ ; \ + install -g 0 -o 0 -m 0644 doc/man$$s/*.gz $(MAN_INSTALL_PATH)/man$$s/ ; \ + done +endif + +distclean:: distclean-asciidoc + +distclean-asciidoc: + $(gen_verbose) rm -rf doc/html/ doc/guide.pdf doc/man3/ doc/man7/ + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: bootstrap bootstrap-lib bootstrap-rel new list-templates + +# Core targets. 
+ +help:: + $(verbose) printf "%s\n" "" \ + "Bootstrap targets:" \ + " bootstrap Generate a skeleton of an OTP application" \ + " bootstrap-lib Generate a skeleton of an OTP library" \ + " bootstrap-rel Generate the files needed to build a release" \ + " new-app n=NAME Create a new local OTP application NAME" \ + " new-lib n=NAME Create a new local OTP library NAME" \ + " new t=TPL n=NAME Generate a module NAME based on the template TPL" \ + " new t=T n=N in=APP Generate a module NAME based on the template TPL in APP" \ + " list-templates List available templates" + +# Bootstrap templates. + +define bs_appsrc +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]}, + {mod, {$p_app, []}}, + {env, []} +]}. +endef + +define bs_appsrc_lib +{application, $p, [ + {description, ""}, + {vsn, "0.1.0"}, + {id, "git"}, + {modules, []}, + {registered, []}, + {applications, [ + kernel, + stdlib + ]} +]}. +endef + +ifdef SP +define bs_Makefile +PROJECT = $p +PROJECT_DESCRIPTION = New project +PROJECT_VERSION = 0.0.1 + +# Whitespace to be used when creating files from templates. +SP = $(SP) + +include erlang.mk +endef +else +define bs_Makefile +PROJECT = $p +include erlang.mk +endef +endif + +define bs_apps_Makefile +PROJECT = $p +include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(APPS_DIR)/app)/erlang.mk +endef + +define bs_app +-module($p_app). +-behaviour(application). + +-export([start/2]). +-export([stop/1]). + +start(_Type, _Args) -> + $p_sup:start_link(). + +stop(_State) -> + ok. +endef + +define bs_relx_config +{release, {$p_release, "1"}, [$p]}. +{extended_start_script, true}. +{sys_config, "rel/sys.config"}. +{vm_args, "rel/vm.args"}. +endef + +define bs_sys_config +[ +]. +endef + +define bs_vm_args +-name $p@127.0.0.1 +-setcookie $p +-heart +endef + +# Normal templates. + +define tpl_supervisor +-module($(n)). +-behaviour(supervisor). + +-export([start_link/0]). 
+-export([init/1]). + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +init([]) -> + Procs = [], + {ok, {{one_for_one, 1, 5}, Procs}}. +endef + +define tpl_gen_server +-module($(n)). +-behaviour(gen_server). + +%% API. +-export([start_link/0]). + +%% gen_server. +-export([init/1]). +-export([handle_call/3]). +-export([handle_cast/2]). +-export([handle_info/2]). +-export([terminate/2]). +-export([code_change/3]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_server:start_link(?MODULE, [], []). + +%% gen_server. + +init([]) -> + {ok, #state{}}. + +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. +endef + +define tpl_cowboy_http +-module($(n)). +-behaviour(cowboy_http_handler). + +-export([init/3]). +-export([handle/2]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {ok, Req, #state{}}. + +handle(Req, State=#state{}) -> + {ok, Req2} = cowboy_req:reply(200, Req), + {ok, Req2, State}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_gen_fsm +-module($(n)). +-behaviour(gen_fsm). + +%% API. +-export([start_link/0]). + +%% gen_fsm. +-export([init/1]). +-export([state_name/2]). +-export([handle_event/3]). +-export([state_name/3]). +-export([handle_sync_event/4]). +-export([handle_info/3]). +-export([terminate/3]). +-export([code_change/4]). + +-record(state, { +}). + +%% API. + +-spec start_link() -> {ok, pid()}. +start_link() -> + gen_fsm:start_link(?MODULE, [], []). + +%% gen_fsm. + +init([]) -> + {ok, state_name, #state{}}. + +state_name(_Event, StateData) -> + {next_state, state_name, StateData}. + +handle_event(_Event, StateName, StateData) -> + {next_state, StateName, StateData}. 
+ +state_name(_Event, _From, StateData) -> + {reply, ignored, state_name, StateData}. + +handle_sync_event(_Event, _From, StateName, StateData) -> + {reply, ignored, StateName, StateData}. + +handle_info(_Info, StateName, StateData) -> + {next_state, StateName, StateData}. + +terminate(_Reason, _StateName, _StateData) -> + ok. + +code_change(_OldVsn, StateName, StateData, _Extra) -> + {ok, StateName, StateData}. +endef + +define tpl_cowboy_loop +-module($(n)). +-behaviour(cowboy_loop_handler). + +-export([init/3]). +-export([info/3]). +-export([terminate/3]). + +-record(state, { +}). + +init(_, Req, _Opts) -> + {loop, Req, #state{}, 5000, hibernate}. + +info(_Info, Req, State) -> + {loop, Req, State, hibernate}. + +terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_cowboy_rest +-module($(n)). + +-export([init/3]). +-export([content_types_provided/2]). +-export([get_html/2]). + +init(_, _Req, _Opts) -> + {upgrade, protocol, cowboy_rest}. + +content_types_provided(Req, State) -> + {[{{<<"text">>, <<"html">>, '*'}, get_html}], Req, State}. + +get_html(Req, State) -> + {<<"This is REST!">>, Req, State}. +endef + +define tpl_cowboy_ws +-module($(n)). +-behaviour(cowboy_websocket_handler). + +-export([init/3]). +-export([websocket_init/3]). +-export([websocket_handle/3]). +-export([websocket_info/3]). +-export([websocket_terminate/3]). + +-record(state, { +}). + +init(_, _, _) -> + {upgrade, protocol, cowboy_websocket}. + +websocket_init(_, Req, _Opts) -> + Req2 = cowboy_req:compact(Req), + {ok, Req2, #state{}}. + +websocket_handle({text, Data}, Req, State) -> + {reply, {text, Data}, Req, State}; +websocket_handle({binary, Data}, Req, State) -> + {reply, {binary, Data}, Req, State}; +websocket_handle(_Frame, Req, State) -> + {ok, Req, State}. + +websocket_info(_Info, Req, State) -> + {ok, Req, State}. + +websocket_terminate(_Reason, _Req, _State) -> + ok. +endef + +define tpl_ranch_protocol +-module($(n)). +-behaviour(ranch_protocol). 
+ +-export([start_link/4]). +-export([init/4]). + +-type opts() :: []. +-export_type([opts/0]). + +-record(state, { + socket :: inet:socket(), + transport :: module() +}). + +start_link(Ref, Socket, Transport, Opts) -> + Pid = spawn_link(?MODULE, init, [Ref, Socket, Transport, Opts]), + {ok, Pid}. + +-spec init(ranch:ref(), inet:socket(), module(), opts()) -> ok. +init(Ref, Socket, Transport, _Opts) -> + ok = ranch:accept_ack(Ref), + loop(#state{socket=Socket, transport=Transport}). + +loop(State) -> + loop(State). +endef + +# Plugin-specific targets. + +define render_template + $(verbose) printf -- '$(subst $(newline),\n,$(subst %,%%,$(subst ','\'',$(subst $(tab),$(WS),$(call $(1))))))\n' > $(2) +endef + +ifndef WS +ifdef SP +WS = $(subst a,,a $(wordlist 1,$(SP),a a a a a a a a a a a a a a a a a a a a)) +else +WS = $(tab) +endif +endif + +bootstrap: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(eval n := $(PROJECT)_sup) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc,src/$(PROJECT).app.src) +endif + $(call render_template,bs_app,src/$(PROJECT)_app.erl) + $(call render_template,tpl_supervisor,src/$(PROJECT)_sup.erl) + +bootstrap-lib: +ifneq ($(wildcard src/),) + $(error Error: src/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_Makefile,Makefile) + $(verbose) mkdir src/ +ifdef LEGACY + $(call render_template,bs_appsrc_lib,src/$(PROJECT).app.src) +endif + +bootstrap-rel: +ifneq ($(wildcard relx.config),) + $(error Error: relx.config already exists) +endif +ifneq ($(wildcard rel/),) + $(error Error: rel/ directory already exists) +endif + $(eval p := $(PROJECT)) + $(call render_template,bs_relx_config,relx.config) + $(verbose) mkdir rel/ + $(call render_template,bs_sys_config,rel/sys.config) + $(call render_template,bs_vm_args,rel/vm.args) + +new-app: +ifndef in + $(error Usage: $(MAKE) 
new-app in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(eval n := $(in)_sup) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc,$(APPS_DIR)/$p/src/$p.app.src) +endif + $(call render_template,bs_app,$(APPS_DIR)/$p/src/$p_app.erl) + $(call render_template,tpl_supervisor,$(APPS_DIR)/$p/src/$p_sup.erl) + +new-lib: +ifndef in + $(error Usage: $(MAKE) new-lib in=APP) +endif +ifneq ($(wildcard $(APPS_DIR)/$in),) + $(error Error: Application $in already exists) +endif + $(eval p := $(in)) + $(verbose) mkdir -p $(APPS_DIR)/$p/src/ + $(call render_template,bs_apps_Makefile,$(APPS_DIR)/$p/Makefile) +ifdef LEGACY + $(call render_template,bs_appsrc_lib,$(APPS_DIR)/$p/src/$p.app.src) +endif + +new: +ifeq ($(wildcard src/)$(in),) + $(error Error: src/ directory does not exist) +endif +ifndef t + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifndef tpl_$(t) + $(error Unknown template) +endif +ifndef n + $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= +else + $(call render_template,tpl_$(t),src/$(n).erl) +endif + +list-templates: + $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) + +# Copyright (c) 2014-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: clean-c_src distclean-c_src-env + +# Configuration. + +C_SRC_DIR ?= $(CURDIR)/c_src +C_SRC_ENV ?= $(C_SRC_DIR)/env.mk +C_SRC_OUTPUT ?= $(CURDIR)/priv/$(PROJECT).so +C_SRC_TYPE ?= shared + +# System type and C compiler/flags. 
+ +ifeq ($(PLATFORM),darwin) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -arch x86_64 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -arch x86_64 -finline-functions -Wall + LDFLAGS ?= -arch x86_64 -flat_namespace -undefined suppress +else ifeq ($(PLATFORM),freebsd) + CC ?= cc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +else ifeq ($(PLATFORM),linux) + CC ?= gcc + CFLAGS ?= -O3 -std=c99 -finline-functions -Wall -Wmissing-prototypes + CXXFLAGS ?= -O3 -finline-functions -Wall +endif + +CFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) +CXXFLAGS += -fPIC -I $(ERTS_INCLUDE_DIR) -I $(ERL_INTERFACE_INCLUDE_DIR) + +LDLIBS += -L $(ERL_INTERFACE_LIB_DIR) -lerl_interface -lei + +# Verbosity. + +c_verbose_0 = @echo " C " $(?F); +c_verbose = $(c_verbose_$(V)) + +cpp_verbose_0 = @echo " CPP " $(?F); +cpp_verbose = $(cpp_verbose_$(V)) + +link_verbose_0 = @echo " LD " $(@F); +link_verbose = $(link_verbose_$(V)) + +# Targets. 
+ +ifeq ($(wildcard $(C_SRC_DIR)),) +else ifneq ($(wildcard $(C_SRC_DIR)/Makefile),) +app:: app-c_src + +test-build:: app-c_src + +app-c_src: + $(MAKE) -C $(C_SRC_DIR) + +clean:: + $(MAKE) -C $(C_SRC_DIR) clean + +else + +ifeq ($(SOURCES),) +SOURCES := $(sort $(foreach pat,*.c *.C *.cc *.cpp,$(call core_find,$(C_SRC_DIR)/,$(pat)))) +endif +OBJECTS = $(addsuffix .o, $(basename $(SOURCES))) + +COMPILE_C = $(c_verbose) $(CC) $(CFLAGS) $(CPPFLAGS) -c +COMPILE_CPP = $(cpp_verbose) $(CXX) $(CXXFLAGS) $(CPPFLAGS) -c + +app:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +test-build:: $(C_SRC_ENV) $(C_SRC_OUTPUT) + +$(C_SRC_OUTPUT): $(OBJECTS) + $(verbose) mkdir -p priv/ + $(link_verbose) $(CC) $(OBJECTS) \ + $(LDFLAGS) $(if $(filter $(C_SRC_TYPE),shared),-shared) $(LDLIBS) \ + -o $(C_SRC_OUTPUT) + +%.o: %.c + $(COMPILE_C) $(OUTPUT_OPTION) $< + +%.o: %.cc + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.C + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +%.o: %.cpp + $(COMPILE_CPP) $(OUTPUT_OPTION) $< + +clean:: clean-c_src + +clean-c_src: + $(gen_verbose) rm -f $(C_SRC_OUTPUT) $(OBJECTS) + +endif + +ifneq ($(wildcard $(C_SRC_DIR)),) +$(C_SRC_ENV): + $(verbose) $(ERL) -eval "file:write_file(\"$(C_SRC_ENV)\", \ + io_lib:format( \ + \"ERTS_INCLUDE_DIR ?= ~s/erts-~s/include/~n\" \ + \"ERL_INTERFACE_INCLUDE_DIR ?= ~s~n\" \ + \"ERL_INTERFACE_LIB_DIR ?= ~s~n\", \ + [code:root_dir(), erlang:system_info(version), \ + code:lib_dir(erl_interface, include), \ + code:lib_dir(erl_interface, lib)])), \ + halt()." + +distclean:: distclean-c_src-env + +distclean-c_src-env: + $(gen_verbose) rm -f $(C_SRC_ENV) + +-include $(C_SRC_ENV) +endif + +# Templates. + +define bs_c_nif +#include "erl_nif.h" + +static int loads = 0; + +static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info) +{ + /* Initialize private data. 
*/ + *priv_data = NULL; + + loads++; + + return 0; +} + +static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info) +{ + /* Convert the private data to the new version. */ + *priv_data = *old_priv_data; + + loads++; + + return 0; +} + +static void unload(ErlNifEnv* env, void* priv_data) +{ + if (loads == 1) { + /* Destroy the private data. */ + } + + loads--; +} + +static ERL_NIF_TERM hello(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + if (enif_is_atom(env, argv[0])) { + return enif_make_tuple2(env, + enif_make_atom(env, "hello"), + argv[0]); + } + + return enif_make_tuple2(env, + enif_make_atom(env, "error"), + enif_make_atom(env, "badarg")); +} + +static ErlNifFunc nif_funcs[] = { + {"hello", 1, hello} +}; + +ERL_NIF_INIT($n, nif_funcs, load, NULL, upgrade, unload) +endef + +define bs_erl_nif +-module($n). + +-export([hello/1]). + +-on_load(on_load/0). +on_load() -> + PrivDir = case code:priv_dir(?MODULE) of + {error, _} -> + AppPath = filename:dirname(filename:dirname(code:which(?MODULE))), + filename:join(AppPath, "priv"); + Path -> + Path + end, + erlang:load_nif(filename:join(PrivDir, atom_to_list(?MODULE)), 0). + +hello(_) -> + erlang:nif_error({not_loaded, ?MODULE}). +endef + +new-nif: +ifneq ($(wildcard $(C_SRC_DIR)/$n.c),) + $(error Error: $(C_SRC_DIR)/$n.c already exists) +endif +ifneq ($(wildcard src/$n.erl),) + $(error Error: src/$n.erl already exists) +endif +ifdef in + $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new-nif n=$n in= +else + $(verbose) mkdir -p $(C_SRC_DIR) src/ + $(call render_template,bs_c_nif,$(C_SRC_DIR)/$n.c) + $(call render_template,bs_erl_nif,src/$n.erl) +endif + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. 
+ +.PHONY: ci ci-setup distclean-kerl + +KERL ?= $(CURDIR)/kerl +export KERL + +KERL_URL ?= https://raw.githubusercontent.com/yrashk/kerl/master/kerl + +OTP_GIT ?= https://github.com/erlang/otp + +CI_INSTALL_DIR ?= $(HOME)/erlang +CI_OTP ?= + +ifeq ($(strip $(CI_OTP)),) +ci:: +else +ci:: $(addprefix ci-,$(CI_OTP)) + +ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP)) + +ci-setup:: + +ci_verbose_0 = @echo " CI " $(1); +ci_verbose = $(ci_verbose_$(V)) + +define ci_target +ci-$(1): $(CI_INSTALL_DIR)/$(1) + $(ci_verbose) \ + PATH="$(CI_INSTALL_DIR)/$(1)/bin:$(PATH)" \ + CI_OTP_RELEASE="$(1)" \ + CT_OPTS="-label $(1)" \ + $(MAKE) clean ci-setup tests +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_target,$(otp)))) + +define ci_otp_target +ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) +$(CI_INSTALL_DIR)/$(1): $(KERL) + $(KERL) build git $(OTP_GIT) $(1) $(1) + $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) +endif +endef + +$(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) + +$(KERL): + $(gen_verbose) $(call core_http_get,$(KERL),$(KERL_URL)) + $(verbose) chmod +x $(KERL) + +help:: + $(verbose) printf "%s\n" "" \ + "Continuous Integration targets:" \ + " ci Run '$(MAKE) tests' on all configured Erlang versions." \ + "" \ + "The CI_OTP variable must be defined with the Erlang versions" \ + "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" + +distclean:: distclean-kerl + +distclean-kerl: + $(gen_verbose) rm -rf $(KERL) +endif + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: ct distclean-ct + +# Configuration. + +CT_OPTS ?= +ifneq ($(wildcard $(TEST_DIR)),) + CT_SUITES ?= $(sort $(subst _SUITE.erl,,$(notdir $(call core_find,$(TEST_DIR)/,*_SUITE.erl)))) +else + CT_SUITES ?= +endif + +# Core targets. 
+ +tests:: ct + +distclean:: distclean-ct + +help:: + $(verbose) printf "%s\n" "" \ + "Common_test targets:" \ + " ct Run all the common_test suites for this project" \ + "" \ + "All your common_test suites have their associated targets." \ + "A suite named http_SUITE can be ran using the ct-http target." + +# Plugin-specific targets. + +CT_RUN = ct_run \ + -no_auto_compile \ + -noinput \ + -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) \ + -dir $(TEST_DIR) \ + -logdir $(CURDIR)/logs + +ifeq ($(CT_SUITES),) +ct: +else +ct: test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) +endif + +define ct_suite_target +ct-$(1): test-build + $(verbose) mkdir -p $(CURDIR)/logs/ + $(gen_verbose) $(CT_RUN) -suite $(addsuffix _SUITE,$(1)) $(CT_OPTS) +endef + +$(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) + +distclean-ct: + $(gen_verbose) rm -rf $(CURDIR)/logs/ + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: plt distclean-plt dialyze + +# Configuration. + +DIALYZER_PLT ?= $(CURDIR)/.$(PROJECT).plt +export DIALYZER_PLT + +PLT_APPS ?= +DIALYZER_DIRS ?= --src -r src +DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions \ + -Wunmatched_returns # -Wunderspecs + +# Core targets. + +check:: dialyze + +distclean:: distclean-plt + +help:: + $(verbose) printf "%s\n" "" \ + "Dialyzer targets:" \ + " plt Build a PLT file for this project" \ + " dialyze Analyze the project using Dialyzer" + +# Plugin-specific targets. 
+ +$(DIALYZER_PLT): deps app + $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS) + +plt: $(DIALYZER_PLT) + +distclean-plt: + $(gen_verbose) rm -f $(DIALYZER_PLT) + +ifneq ($(wildcard $(DIALYZER_PLT)),) +dialyze: +else +dialyze: $(DIALYZER_PLT) +endif + $(verbose) dialyzer --no_native $(DIALYZER_DIRS) $(DIALYZER_OPTS) + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: elvis distclean-elvis + +# Configuration. + +ELVIS_CONFIG ?= $(CURDIR)/elvis.config + +ELVIS ?= $(CURDIR)/elvis +export ELVIS + +ELVIS_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis +ELVIS_CONFIG_URL ?= https://github.com/inaka/elvis/releases/download/0.2.5/elvis.config +ELVIS_OPTS ?= + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Elvis targets:" \ + " elvis Run Elvis using the local elvis.config or download the default otherwise" + +distclean:: distclean-elvis + +# Plugin-specific targets. + +$(ELVIS): + $(gen_verbose) $(call core_http_get,$(ELVIS),$(ELVIS_URL)) + $(verbose) chmod +x $(ELVIS) + +$(ELVIS_CONFIG): + $(verbose) $(call core_http_get,$(ELVIS_CONFIG),$(ELVIS_CONFIG_URL)) + +elvis: $(ELVIS) $(ELVIS_CONFIG) + $(verbose) $(ELVIS) rock -c $(ELVIS_CONFIG) $(ELVIS_OPTS) + +distclean-elvis: + $(gen_verbose) rm -rf $(ELVIS) + +# Copyright (c) 2014 Dave Cottlehuber +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: distclean-escript escript + +# Configuration. + +ESCRIPT_NAME ?= $(PROJECT) +ESCRIPT_COMMENT ?= This is an -*- erlang -*- file + +ESCRIPT_BEAMS ?= "ebin/*", "deps/*/ebin/*" +ESCRIPT_SYS_CONFIG ?= "rel/sys.config" +ESCRIPT_EMU_ARGS ?= -pa . \ + -sasl errlog_type error \ + -escript main $(ESCRIPT_NAME) +ESCRIPT_SHEBANG ?= /usr/bin/env escript +ESCRIPT_STATIC ?= "deps/*/priv/**", "priv/**" + +# Core targets. 
+ +distclean:: distclean-escript + +help:: + $(verbose) printf "%s\n" "" \ + "Escript targets:" \ + " escript Build an executable escript archive" \ + +# Plugin-specific targets. + +# Based on https://github.com/synrc/mad/blob/master/src/mad_bundle.erl +# Copyright (c) 2013 Maxim Sokhatsky, Synrc Research Center +# Modified MIT License, https://github.com/synrc/mad/blob/master/LICENSE : +# Software may only be used for the great good and the true happiness of all +# sentient beings. + +define ESCRIPT_RAW +'Read = fun(F) -> {ok, B} = file:read_file(filename:absname(F)), B end,'\ +'Files = fun(L) -> A = lists:concat([filelib:wildcard(X)||X<- L ]),'\ +' [F || F <- A, not filelib:is_dir(F) ] end,'\ +'Squash = fun(L) -> [{filename:basename(F), Read(F) } || F <- L ] end,'\ +'Zip = fun(A, L) -> {ok,{_,Z}} = zip:create(A, L, [{compress,all},memory]), Z end,'\ +'Ez = fun(Escript) ->'\ +' Static = Files([$(ESCRIPT_STATIC)]),'\ +' Beams = Squash(Files([$(ESCRIPT_BEAMS), $(ESCRIPT_SYS_CONFIG)])),'\ +' Archive = Beams ++ [{ "static.gz", Zip("static.gz", Static)}],'\ +' escript:create(Escript, [ $(ESCRIPT_OPTIONS)'\ +' {archive, Archive, [memory]},'\ +' {shebang, "$(ESCRIPT_SHEBANG)"},'\ +' {comment, "$(ESCRIPT_COMMENT)"},'\ +' {emu_args, " $(ESCRIPT_EMU_ARGS)"}'\ +' ]),'\ +' file:change_mode(Escript, 8#755)'\ +'end,'\ +'Ez("$(ESCRIPT_NAME)"),'\ +'halt().' +endef + +ESCRIPT_COMMAND = $(subst ' ',,$(ESCRIPT_RAW)) + +escript:: distclean-escript deps app + $(gen_verbose) $(ERL) -eval $(ESCRIPT_COMMAND) + +distclean-escript: + $(gen_verbose) rm -f $(ESCRIPT_NAME) + +# Copyright (c) 2014, Enrique Fernandez +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: eunit + +# Configuration + +EUNIT_OPTS ?= + +# Core targets. + +tests:: eunit + +help:: + $(verbose) printf "%s\n" "" \ + "EUnit targets:" \ + " eunit Run all the EUnit tests for this project" + +# Plugin-specific targets. 
+ +define eunit.erl + case "$(COVER)" of + "" -> ok; + _ -> + case cover:compile_beam_directory("ebin") of + {error, _} -> halt(1); + _ -> ok + end + end, + case eunit:test([$(call comma_list,$(1))], [$(EUNIT_OPTS)]) of + ok -> ok; + error -> halt(2) + end, + case "$(COVER)" of + "" -> ok; + _ -> + cover:export("eunit.coverdata") + end, + halt() +endef + +EUNIT_EBIN_MODS = $(notdir $(basename $(call core_find,ebin/,*.beam))) +EUNIT_TEST_MODS = $(notdir $(basename $(call core_find,$(TEST_DIR)/,*.beam))) +EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \ + $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),{module,'$(mod)'}) + +eunit: test-build + $(gen_verbose) $(ERL) -pa $(TEST_DIR) $(DEPS_DIR)/*/ebin ebin \ + -eval "$(subst $(newline),,$(subst ",\",$(call eunit.erl,$(EUNIT_MODS))))" + +# Copyright (c) 2013-2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: relx-rel distclean-relx-rel distclean-relx run + +# Configuration. + +RELX ?= $(CURDIR)/relx +RELX_CONFIG ?= $(CURDIR)/relx.config + +RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.5.0/relx +RELX_OPTS ?= +RELX_OUTPUT_DIR ?= _rel + +ifeq ($(firstword $(RELX_OPTS)),-o) + RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) +else + RELX_OPTS += -o $(RELX_OUTPUT_DIR) +endif + +# Core targets. + +ifeq ($(IS_DEP),) +ifneq ($(wildcard $(RELX_CONFIG)),) +rel:: relx-rel +endif +endif + +distclean:: distclean-relx-rel distclean-relx + +# Plugin-specific targets. + +$(RELX): + $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) + $(verbose) chmod +x $(RELX) + +relx-rel: $(RELX) rel-deps app + $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) + +distclean-relx-rel: + $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) + +distclean-relx: + $(gen_verbose) rm -rf $(RELX) + +# Run target. 
+ +ifeq ($(wildcard $(RELX_CONFIG)),) +run: +else + +define get_relx_release.erl + {ok, Config} = file:consult("$(RELX_CONFIG)"), + {release, {Name, _}, _} = lists:keyfind(release, 1, Config), + io:format("~s", [Name]), + halt(0). +endef + +RELX_RELEASE = `$(call erlang,$(get_relx_release.erl))` + +run: all + $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_RELEASE)/bin/$(RELX_RELEASE) console + +help:: + $(verbose) printf "%s\n" "" \ + "Relx targets:" \ + " run Compile the project, build the release and run it" + +endif + +# Copyright (c) 2014, M Robert Martin +# Copyright (c) 2015, Loïc Hoguin +# This file is contributed to erlang.mk and subject to the terms of the ISC License. + +.PHONY: shell + +# Configuration. + +SHELL_ERL ?= erl +SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin +SHELL_OPTS ?= + +ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) + +# Core targets + +help:: + $(verbose) printf "%s\n" "" \ + "Shell targets:" \ + " shell Run an erlang shell with SHELL_OPTS or reasonable default" + +# Plugin-specific targets. + +$(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) + +build-shell-deps: $(ALL_SHELL_DEPS_DIRS) + $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done + +shell: build-shell-deps + $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) + +# Copyright (c) 2015, Loïc Hoguin +# This file is part of erlang.mk and subject to the terms of the ISC License. + +ifeq ($(filter triq,$(DEPS) $(TEST_DEPS)),triq) +.PHONY: triq + +# Targets. + +tests:: triq + +define triq_check.erl + code:add_pathsa(["$(CURDIR)/ebin", "$(DEPS_DIR)/*/ebin"]), + try + case $(1) of + all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); + module -> triq:check($(2)); + function -> triq:check($(2)) + end + of + true -> halt(0); + _ -> halt(1) + catch error:undef -> + io:format("Undefined property or module~n"), + halt(0) + end. 
+endef + +ifdef t +ifeq (,$(findstring :,$(t))) +triq: test-build + $(verbose) $(call erlang,$(call triq_check.erl,module,$(t))) +else +triq: test-build + $(verbose) echo Testing $(t)/0 + $(verbose) $(call erlang,$(call triq_check.erl,function,$(t)())) +endif +else +triq: test-build + $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) + $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) +endif +endif + +# Copyright (c) 2015, Erlang Solutions Ltd. +# This file is part of erlang.mk and subject to the terms of the ISC License. + +.PHONY: xref distclean-xref + +# Configuration. + +ifeq ($(XREF_CONFIG),) + XREF_ARGS := +else + XREF_ARGS := -c $(XREF_CONFIG) +endif + +XREFR ?= $(CURDIR)/xrefr +export XREFR + +XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr + +# Core targets. + +help:: + $(verbose) printf "%s\n" "" \ + "Xref targets:" \ + " xref Run Xrefr using $XREF_CONFIG as config file if defined" + +distclean:: distclean-xref + +# Plugin-specific targets. + +$(XREFR): + $(gen_verbose) $(call core_http_get,$(XREFR),$(XREFR_URL)) + $(verbose) chmod +x $(XREFR) + +xref: deps app $(XREFR) + $(gen_verbose) $(XREFR) $(XREFR_ARGS) + +distclean-xref: + $(gen_verbose) rm -rf $(XREFR) + +# Copyright 2015, Viktor Söderqvist +# This file is part of erlang.mk and subject to the terms of the ISC License. + +COVER_REPORT_DIR = cover + +# Hook in coverage to ct + +ifdef COVER +ifdef CT_RUN +# All modules in 'ebin' +COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) + +test-build:: $(TEST_DIR)/ct.cover.spec + +$(TEST_DIR)/ct.cover.spec: + $(verbose) echo Cover mods: $(COVER_MODS) + $(gen_verbose) printf "%s\n" \ + '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ + '{export,"$(CURDIR)/ct.coverdata"}.' 
> $@ + +CT_RUN += -cover $(TEST_DIR)/ct.cover.spec +endif +endif + +# Core targets + +ifdef COVER +ifneq ($(COVER_REPORT_DIR),) +tests:: + $(verbose) $(MAKE) --no-print-directory cover-report +endif +endif + +clean:: coverdata-clean + +ifneq ($(COVER_REPORT_DIR),) +distclean:: cover-report-clean +endif + +help:: + $(verbose) printf "%s\n" "" \ + "Cover targets:" \ + " cover-report Generate a HTML coverage report from previously collected" \ + " cover data." \ + " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ + "" \ + "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ + "target tests additionally generates a HTML coverage report from the combined" \ + "coverdata files from each of these testing tools. HTML reports can be disabled" \ + "by setting COVER_REPORT_DIR to empty." + +# Plugin specific targets + +COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) + +.PHONY: coverdata-clean +coverdata-clean: + $(gen_verbose) rm -f *.coverdata ct.cover.spec + +# Merge all coverdata files into one. +all.coverdata: $(COVERDATA) + $(gen_verbose) $(ERL) -eval ' \ + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ + cover:export("$@"), halt(0).' + +# These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to +# empty if you want the coverdata files but not the HTML report. +ifneq ($(COVER_REPORT_DIR),) + +.PHONY: cover-report-clean cover-report + +cover-report-clean: + $(gen_verbose) rm -rf $(COVER_REPORT_DIR) + +ifeq ($(COVERDATA),) +cover-report: +else + +# Modules which include eunit.hrl always contain one line without coverage +# because eunit defines test/0 which is never called. We compensate for this. 
+EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ + grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ + | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) + +define cover_report.erl + $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) + Ms = cover:imported_modules(), + [cover:analyse_to_file(M, "$(COVER_REPORT_DIR)/" ++ atom_to_list(M) + ++ ".COVER.html", [html]) || M <- Ms], + Report = [begin {ok, R} = cover:analyse(M, module), R end || M <- Ms], + EunitHrlMods = [$(EUNIT_HRL_MODS)], + Report1 = [{M, {Y, case lists:member(M, EunitHrlMods) of + true -> N - 1; false -> N end}} || {M, {Y, N}} <- Report], + TotalY = lists:sum([Y || {_, {Y, _}} <- Report1]), + TotalN = lists:sum([N || {_, {_, N}} <- Report1]), + TotalPerc = round(100 * TotalY / (TotalY + TotalN)), + {ok, F} = file:open("$(COVER_REPORT_DIR)/index.html", [write]), + io:format(F, "~n" + "~n" + "Coverage report~n" + "~n", []), + io:format(F, "

Coverage

~n

Total: ~p%

~n", [TotalPerc]), + io:format(F, "~n", []), + [io:format(F, "" + "~n", + [M, M, round(100 * Y / (Y + N))]) || {M, {Y, N}} <- Report1], + How = "$(subst $(space),$(comma)$(space),$(basename $(COVERDATA)))", + Date = "$(shell date -u "+%Y-%m-%dT%H:%M:%SZ")", + io:format(F, "
ModuleCoverage
~p~p%
~n" + "

Generated using ~s and erlang.mk on ~s.

~n" + "", [How, Date]), + halt(). +endef + +cover-report: + $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) + $(gen_verbose) $(call erlang,$(cover_report.erl)) + +endif +endif # ifneq ($(COVER_REPORT_DIR),) + +# Copyright (c) 2013-2015, Loïc Hoguin +# Copyright (c) 2015, Jean-Sébastien Pédron +# This file is part of erlang.mk and subject to the terms of the ISC License. + +# Fetch dependencies (without building them). + +.PHONY: fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps \ + fetch-shell-deps + +ifneq ($(SKIP_DEPS),) +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: + @: +else +# By default, we fetch "normal" dependencies. They are also included no +# matter the type of requested dependencies. +# +# $(ALL_DEPS_DIRS) includes $(BUILD_DEPS). +fetch-deps: $(ALL_DEPS_DIRS) +fetch-doc-deps: $(ALL_DEPS_DIRS) $(ALL_DOC_DEPS_DIRS) +fetch-rel-deps: $(ALL_DEPS_DIRS) $(ALL_REL_DEPS_DIRS) +fetch-test-deps: $(ALL_DEPS_DIRS) $(ALL_TEST_DEPS_DIRS) +fetch-shell-deps: $(ALL_DEPS_DIRS) $(ALL_SHELL_DEPS_DIRS) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. +ifneq ($(filter doc,$(DEP_TYPES)),) +fetch-deps: $(ALL_DOC_DEPS_DIRS) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +fetch-deps: $(ALL_REL_DEPS_DIRS) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +fetch-deps: $(ALL_TEST_DEPS_DIRS) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +fetch-deps: $(ALL_SHELL_DEPS_DIRS) +endif + +fetch-deps fetch-doc-deps fetch-rel-deps fetch-test-deps fetch-shell-deps: +ifndef IS_APP + $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ + $(MAKE) -C $$dep $@ IS_APP=1 || exit $$?; \ + done +endif +ifneq ($(IS_DEP),1) + $(verbose) rm -f $(ERLANG_MK_TMP)/$@.log +endif + $(verbose) mkdir -p $(ERLANG_MK_TMP) + $(verbose) for dep in $^ ; do \ + if ! 
grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/$@.log; then \ + echo $$dep >> $(ERLANG_MK_TMP)/$@.log; \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C $$dep fetch-deps IS_DEP=1 || exit $$?; \ + fi \ + fi \ + done +endif # ifneq ($(SKIP_DEPS),) + +# List dependencies recursively. + +.PHONY: list-deps list-doc-deps list-rel-deps list-test-deps \ + list-shell-deps + +ifneq ($(SKIP_DEPS),) +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): + $(verbose) :> $@ +else +LIST_DIRS = $(ALL_DEPS_DIRS) +LIST_DEPS = $(BUILD_DEPS) $(DEPS) + +$(ERLANG_MK_RECURSIVE_DEPS_LIST): fetch-deps + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DIRS += $(ALL_DOC_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): LIST_DEPS += $(DOC_DEPS) +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-doc-deps +else +$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DIRS += $(ALL_REL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): LIST_DEPS += $(REL_DEPS) +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-rel-deps +else +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DIRS += $(ALL_TEST_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): LIST_DEPS += $(TEST_DEPS) +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-test-deps +else +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST): fetch-deps +endif + +ifneq ($(IS_DEP),1) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DIRS += $(ALL_SHELL_DEPS_DIRS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): LIST_DEPS += $(SHELL_DEPS) +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-shell-deps +else +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): fetch-deps +endif + +$(ERLANG_MK_RECURSIVE_DEPS_LIST) \ 
+$(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) \ +$(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST): +ifneq ($(IS_DEP),1) + $(verbose) rm -f $@.orig +endif +ifndef IS_APP + $(verbose) for app in $(filter-out $(CURDIR),$(ALL_APPS_DIRS)); do \ + $(MAKE) -C "$$app" --no-print-directory $@ IS_APP=1 || :; \ + done +endif + $(verbose) for dep in $(filter-out $(CURDIR),$(LIST_DIRS)); do \ + if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ + $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ + $(MAKE) -C "$$dep" --no-print-directory $@ IS_DEP=1; \ + fi; \ + done + $(verbose) for dep in $(LIST_DEPS); do \ + echo $(DEPS_DIR)/$$dep; \ + done >> $@.orig +ifndef IS_APP +ifneq ($(IS_DEP),1) + $(verbose) sort < $@.orig | uniq > $@ + $(verbose) rm -f $@.orig +endif +endif +endif # ifneq ($(SKIP_DEPS),) + +ifneq ($(SKIP_DEPS),) +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + @: +else +list-deps: $(ERLANG_MK_RECURSIVE_DEPS_LIST) +list-doc-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +list-rel-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +list-test-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +list-shell-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) + +# Allow to use fetch-deps and $(DEP_TYPES) to fetch multiple types of +# dependencies with a single target. 
+ifneq ($(IS_DEP),1) +ifneq ($(filter doc,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_DOC_DEPS_LIST) +endif +ifneq ($(filter rel,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_REL_DEPS_LIST) +endif +ifneq ($(filter test,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_TEST_DEPS_LIST) +endif +ifneq ($(filter shell,$(DEP_TYPES)),) +list-deps: $(ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST) +endif +endif + +list-deps list-doc-deps list-rel-deps list-test-deps list-shell-deps: + $(verbose) cat $^ | sort | uniq +endif # ifneq ($(SKIP_DEPS),) diff --git a/rabbitmq-server/generate_app b/rabbitmq-server/generate_app deleted file mode 100644 index fb0eb1e..0000000 --- a/rabbitmq-server/generate_app +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -main([InFile, OutFile | SrcDirs]) -> - Modules = [list_to_atom(filename:basename(F, ".erl")) || - SrcDir <- SrcDirs, - F <- filelib:wildcard("*.erl", SrcDir)], - {ok, [{application, Application, Properties}]} = file:consult(InFile), - NewProperties = - case proplists:get_value(modules, Properties) of - [] -> lists:keyreplace(modules, 1, Properties, {modules, Modules}); - _ -> Properties - end, - file:write_file( - OutFile, - io_lib:format("~p.~n", [{application, Application, NewProperties}])). diff --git a/rabbitmq-server/generate_deps b/rabbitmq-server/generate_deps deleted file mode 100644 index ddfca81..0000000 --- a/rabbitmq-server/generate_deps +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- --mode(compile). - -%% We expect the list of Erlang source and header files to arrive on -%% stdin, with the entries colon-separated. 
-main([TargetFile, EbinDir]) -> - ErlsAndHrls = [ string:strip(S,left) || - S <- string:tokens(io:get_line(""), ":\n")], - ErlFiles = [F || F <- ErlsAndHrls, lists:suffix(".erl", F)], - Modules = sets:from_list( - [list_to_atom(filename:basename(FileName, ".erl")) || - FileName <- ErlFiles]), - HrlFiles = [F || F <- ErlsAndHrls, lists:suffix(".hrl", F)], - IncludeDirs = lists:usort([filename:dirname(Path) || Path <- HrlFiles]), - Headers = sets:from_list(HrlFiles), - Deps = lists:foldl( - fun (Path, Deps1) -> - dict:store(Path, detect_deps(IncludeDirs, EbinDir, - Modules, Headers, Path), - Deps1) - end, dict:new(), ErlFiles), - {ok, Hdl} = file:open(TargetFile, [write, delayed_write]), - dict:fold( - fun (_Path, [], ok) -> - ok; - (Path, Dep, ok) -> - Module = filename:basename(Path, ".erl"), - ok = file:write(Hdl, [EbinDir, "/", Module, ".beam: ", - Path]), - ok = sets:fold(fun (E, ok) -> file:write(Hdl, [" ", E]) end, - ok, Dep), - file:write(Hdl, ["\n"]) - end, ok, Deps), - ok = file:write(Hdl, [TargetFile, ": ", escript:script_name(), "\n"]), - ok = file:sync(Hdl), - ok = file:close(Hdl). - -detect_deps(IncludeDirs, EbinDir, Modules, Headers, Path) -> - {ok, Forms} = epp:parse_file(Path, IncludeDirs, [{use_specs, true}]), - lists:foldl( - fun ({attribute, _LineNumber, Attribute, Behaviour}, Deps) - when Attribute =:= behaviour orelse Attribute =:= behavior -> - case sets:is_element(Behaviour, Modules) of - true -> sets:add_element( - [EbinDir, "/", atom_to_list(Behaviour), ".beam"], - Deps); - false -> Deps - end; - ({attribute, _LineNumber, file, {FileName, _LineNumber1}}, Deps) -> - case sets:is_element(FileName, Headers) of - true -> sets:add_element(FileName, Deps); - false -> Deps - end; - (_Form, Deps) -> - Deps - end, sets:new(), Forms). 
diff --git a/rabbitmq-server/git-revisions.txt b/rabbitmq-server/git-revisions.txt new file mode 100644 index 0000000..3b59b71 --- /dev/null +++ b/rabbitmq-server/git-revisions.txt @@ -0,0 +1,30 @@ +rabbit a99d5939c8557384760db3783b59385af7d3db9a stable +amqp_client 208bc6a37972afd6d21cd3355f80723efe86417c stable +cowboy b8e4115eb13488c517d8d8ef33c47d0eaa7838c6 1.0.3 +cowlib 7d8a571b1e50602d701ca203fbf28036b2cf80f5 1.0.1 +mochiweb d024b4a5804fe4e0061c4ed2d1c52bdd168995e9 v2.13.0 +rabbit_common da7529cbee789f36835162220697e55c1a6f5dbb stable +rabbitmq_amqp1_0 0bec8f2ddae21e1a2b3b00b4ff8fa8fa1dc07ebe rabbitmq_v3_6_1_rc1 +rabbitmq_auth_backend_ldap a864e6dc4bca7e8cdb18482cb00565ee917dd6a9 rabbitmq_v3_6_1_rc1 +rabbitmq_auth_mechanism_ssl b2f9f009af90ddca32a131f2cdfab5d77c7826b8 rabbitmq_v3_6_1_rc1 +rabbitmq_codegen 4c4992c458f74d1c2b6398419c0f6e724cb823e1 rabbitmq_v3_6_1_rc1 +rabbitmq_consistent_hash_exchange d30068db8b87d894d0b0a15d413e9584985f7aa6 rabbitmq_v3_6_1_rc1 +rabbitmq_event_exchange 03a8efb1cd53b32d25156f0f60e6b52f6d273df5 stable +rabbitmq_federation 3040e494b4fd9201111ee25dc3fbb723a54d885d rabbitmq_v3_6_1_rc1 +rabbitmq_federation_management 8fbc8483b816c1518378c84d9391ccf44ff44caf rabbitmq_v3_6_1_rc2 +rabbitmq_management 34f817d58b82d07f2b64e254ca2f2bd57443aebe rabbitmq_v3_6_1_rc2 +rabbitmq_management_agent 749e57f5b8aaa5320b4ea56657890ace4d636eea rabbitmq_v3_6_1_rc1 +rabbitmq_management_visualiser 70b61685aac2455c4004366a1da2f8896aa7673a rabbitmq_v3_6_1_rc1 +rabbitmq_mqtt e4d29d12eaeadd424640f0d6db1d8fa804f78168 rabbitmq_v3_6_1_rc1 +rabbitmq_recent_history_exchange 9b8068d8cb3a336ad11a349f3916b08b5dcce6db rabbitmq_v3_6_1_rc1 +rabbitmq_sharding 79c0759630af4b9c6bc06f405db7304b70636526 rabbitmq_v3_6_1_rc1 +rabbitmq_shovel 0cfdb35707fd4b011fc0d75006ecc08a71098958 rabbitmq_v3_6_1_rc1 +rabbitmq_shovel_management 0f1e67d9c9ca62a3cfdbf118d5499fe66507bcd6 rabbitmq_v3_6_1_rc1 +rabbitmq_stomp 2e70958ba55852e981c588396d5719c3756a10f0 stable +rabbitmq_tracing 
edd796bfdb3482502c7a1afc7dbd1db572d9c984 rabbitmq_v3_6_1_rc1 +rabbitmq_web_dispatch 3bbb422d74c705419b9169429e9294743d83da25 rabbitmq_v3_6_1_rc1 +rabbitmq_web_stomp 928da18104446e6ea4af6825bb8c597651ff8bfa rabbitmq_v3_6_1_rc1 +rabbitmq_web_stomp_examples 7f884a9e3edd44e5fa564e329482786fe0aaebc9 rabbitmq_v3_6_1_rc1 +ranch a5d2efcde9a34ad38ab89a26d98ea5335e88625a 1.2.1 +sockjs 7776c2a9d882306b01442b4136e226ef3509436a master +webmachine 6b5210c0ed07159f43222255e05a90bbef6c8cbe diff --git a/rabbitmq-server/include/rabbit_cli.hrl b/rabbitmq-server/include/rabbit_cli.hrl index 737bb4e..2e687e2 100644 --- a/rabbitmq-server/include/rabbit_cli.hrl +++ b/rabbitmq-server/include/rabbit_cli.hrl @@ -48,3 +48,14 @@ -define(ONLINE_DEF, {?ONLINE_OPT, flag}). -define(RPC_TIMEOUT, infinity). + +%% Subset of standartized exit codes from sysexits.h, see +%% https://github.com/rabbitmq/rabbitmq-server/issues/396 for discussion. +-define(EX_OK , 0). +-define(EX_USAGE , 64). % Bad command-line arguments. +-define(EX_DATAERR , 65). % Wrong data in command-line arguments. +-define(EX_NOUSER , 67). % The user specified does not exist. +-define(EX_UNAVAILABLE, 69). % Could not connect to the target node. +-define(EX_SOFTWARE , 70). % Failed to execute command. +-define(EX_TEMPFAIL , 75). % Temporary error (e.g. something has timed out). +-define(EX_CONFIG , 78). 
% Misconfiguration detected diff --git a/rabbitmq-server/plugins-src/Makefile b/rabbitmq-server/plugins-src/Makefile deleted file mode 100644 index 4ab8c86..0000000 --- a/rabbitmq-server/plugins-src/Makefile +++ /dev/null @@ -1,240 +0,0 @@ -.PHONY: default -default: - @echo No default target && false - -REPOS:= \ - rabbitmq-server \ - rabbitmq-codegen \ - rabbitmq-java-client \ - rabbitmq-dotnet-client \ - rabbitmq-test \ - cowboy-wrapper \ - eldap-wrapper \ - mochiweb-wrapper \ - rabbitmq-amqp1.0 \ - rabbitmq-auth-backend-ldap \ - rabbitmq-auth-mechanism-ssl \ - rabbitmq-consistent-hash-exchange \ - rabbitmq-erlang-client \ - rabbitmq-federation \ - rabbitmq-federation-management \ - rabbitmq-management \ - rabbitmq-management-agent \ - rabbitmq-management-visualiser \ - rabbitmq-metronome \ - rabbitmq-web-dispatch \ - rabbitmq-mqtt \ - rabbitmq-shovel \ - rabbitmq-shovel-management \ - rabbitmq-stomp \ - rabbitmq-toke \ - rabbitmq-tracing \ - rabbitmq-web-stomp \ - rabbitmq-web-stomp-examples \ - sockjs-erlang-wrapper \ - toke \ - webmachine-wrapper - -BRANCH:=master - -UMBRELLA_REPO_FETCH:=$(shell git remote -v 2>/dev/null | awk '/^origin\t.+ \(fetch\)$$/ { print $$2; }') -ifdef UMBRELLA_REPO_FETCH -GIT_CORE_REPOBASE_FETCH:=$(shell dirname $(UMBRELLA_REPO_FETCH)) -GIT_CORE_SUFFIX_FETCH:=$(suffix $(UMBRELLA_REPO_FETCH)) -else -GIT_CORE_REPOBASE_FETCH:=https://github.com/rabbitmq -GIT_CORE_SUFFIX_FETCH:=.git -endif - -UMBRELLA_REPO_PUSH:=$(shell git remote -v 2>/dev/null | awk '/^origin\t.+ \(push\)$$/ { print $$2; }') -ifdef UMBRELLA_REPO_PUSH -GIT_CORE_REPOBASE_PUSH:=$(shell dirname $(UMBRELLA_REPO_PUSH)) -GIT_CORE_SUFFIX_PUSH:=$(suffix $(UMBRELLA_REPO_PUSH)) -else -GIT_CORE_REPOBASE_PUSH:=git@github.com:rabbitmq -GIT_CORE_SUFFIX_PUSH:=.git -endif - -VERSION:=0.0.0 - -ifndef VERBOSE -QUIET:=@ -endif - -#---------------------------------- - -all: - $(MAKE) -f all-packages.mk all-packages VERSION=$(VERSION) - -test: - $(MAKE) -f all-packages.mk test-all-packages 
VERSION=$(VERSION) - -release: - $(MAKE) -f all-packages.mk all-releasable VERSION=$(VERSION) - -clean: - $(MAKE) -f all-packages.mk clean-all-packages - -check-xref: - $(MAKE) -f all-packages.mk check-xref-packages - -plugins-dist: release - rm -rf $(PLUGINS_DIST_DIR) - mkdir -p $(PLUGINS_DIST_DIR) - $(MAKE) -f all-packages.mk copy-releasable VERSION=$(VERSION) PLUGINS_DIST_DIR=$(PLUGINS_DIST_DIR) - -plugins-srcdist: - rm -rf $(PLUGINS_SRC_DIST_DIR) - mkdir -p $(PLUGINS_SRC_DIST_DIR)/licensing - - rsync -a --exclude '.git*' rabbitmq-erlang-client $(PLUGINS_SRC_DIST_DIR)/ - touch $(PLUGINS_SRC_DIST_DIR)/rabbitmq-erlang-client/.srcdist_done - - rsync -a --exclude '.git*' rabbitmq-server $(PLUGINS_SRC_DIST_DIR)/ - touch $(PLUGINS_SRC_DIST_DIR)/rabbitmq-server/.srcdist_done - - $(MAKE) -f all-packages.mk copy-srcdist VERSION=$(VERSION) PLUGINS_SRC_DIST_DIR=$(PLUGINS_SRC_DIST_DIR) - cp Makefile *.mk generate* $(PLUGINS_SRC_DIST_DIR)/ - echo "This is the released version of rabbitmq-public-umbrella. \ -You can clone the full version with: git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git" > $(PLUGINS_SRC_DIST_DIR)/README - - PRESERVE_CLONE_DIR=1 $(MAKE) -C $(PLUGINS_SRC_DIST_DIR) clean - rm -rf $(PLUGINS_SRC_DIST_DIR)/rabbitmq-server - -#---------------------------------- -# Convenience aliases - -.PHONY: co -co: checkout - -.PHONY: ci -ci: checkin - -.PHONY: up -up: update - -.PHONY: st -st: status - -.PHONY: up_c -up_c: named_update - -#---------------------------------- - -$(REPOS): - $(QUIET)retries=5; \ - umbrella_branch="$$(git branch | awk '/^\* / { print $$2; }')"; \ - if test "$$umbrella_branch" = "stable"; then \ - branch_arg="-b $$umbrella_branch"; \ - fi; \ - while ! 
git clone $$branch_arg $(GIT_CORE_REPOBASE_FETCH)/$@$(GIT_CORE_SUFFIX_FETCH); do \ - retries=$$((retries - 1)); \ - if test "$$retries" = 0; then break; fi; \ - sleep 1; \ - done - $(QUIET)test -d $@ - $(QUIET)global_user_name="$$(git config --global user.name)"; \ - global_user_email="$$(git config --global user.email)"; \ - user_name="$$(git config user.name)"; \ - user_email="$$(git config user.email)"; \ - cd $@ && \ - git remote set-url --push origin $(GIT_CORE_REPOBASE_PUSH)/$@$(GIT_CORE_SUFFIX_PUSH) && \ - if test "$$global_user_name" != "$$user_name"; then git config user.name "$$user_name"; fi && \ - if test "$$global_user_email" != "$$user_email"; then git config user.email "$$user_email"; fi - - -.PHONY: checkout -checkout: $(REPOS) - -.PHONY: list-repos -list-repos: - @for repo in $(REPOS); do echo $$repo; done - -.PHONY: sync-gituser -sync-gituser: - @global_user_name="$$(git config --global user.name)"; \ - global_user_email="$$(git config --global user.email)"; \ - user_name="$$(git config user.name)"; \ - user_email="$$(git config user.email)"; \ - for repo in $(REPOS); do \ - cd $$repo && \ - git config --unset user.name && \ - git config --unset user.email && \ - if test "$$global_user_name" != "$$user_name"; then git config user.name "$$user_name"; fi && \ - if test "$$global_user_email" != "$$user_email"; then git config user.email "$$user_email"; fi && \ - cd ..; done - -.PHONY: sync-gitremote -sync-gitremote: - @for repo in $(REPOS); do \ - cd $$repo && \ - git remote set-url origin $(GIT_CORE_REPOBASE_FETCH)/$$repo$(GIT_CORE_SUFFIX_FETCH) && \ - git remote set-url --push origin $(GIT_CORE_REPOBASE_PUSH)/$$repo$(GIT_CORE_SUFFIX_PUSH) && \ - cd ..; done - -#---------------------------------- -# Subrepository management - - -# $(1) is the target -# $(2) is the target dependency. Can use % to get current REPO -# $(3) is the target body. 
Can use % to get current REPO -define repo_target - -.PHONY: $(1) -$(1): $(2) - $(3) - -endef - -# $(1) is the list of repos -# $(2) is the suffix -# $(3) is the target dependency. Can use % to get current REPO -# $(4) is the target body. Can use % to get current REPO -define repo_targets -$(foreach REPO,$(1),$(call repo_target,$(REPO)+$(2),\ - $(patsubst %,$(3),$(REPO)),$(patsubst %,$(4),$(REPO)))) -endef - -# Do not allow status to fork with -j otherwise output will be garbled -.PHONY: status -status: checkout - @for repo in . $(REPOS); do \ - echo "$$repo:"; \ - cd "$$repo" && git status -s && cd - >/dev/null; \ - done - -.PHONY: pull -pull: $(foreach DIR,. $(REPOS),$(DIR)+pull) - -$(eval $(call repo_targets,. $(REPOS),pull,| %,\ - (cd % && git fetch -p && \ - (! git symbolic-ref -q HEAD || git pull --ff-only)))) - -.PHONY: update -update: pull - -.PHONY: named_update -named_update: $(foreach DIR,. $(REPOS),$(DIR)+named_update) - -$(eval $(call repo_targets,. $(REPOS),named_update,| %,\ - (cd % && git fetch -p && git checkout $(BRANCH) && \ - (! git symbolic-ref -q HEAD || git pull --ff-only)))) - -.PHONY: tag -tag: $(foreach DIR,. $(REPOS),$(DIR)+tag) - -$(eval $(call repo_targets,. $(REPOS),tag,| %,\ - (cd % && git tag $(TAG)))) - -.PHONY: push -push: $(foreach DIR,. $(REPOS),$(DIR)+push) - -$(eval $(call repo_targets,. $(REPOS),push,| %,\ - (cd % && git push && git push --tags))) - -.PHONY: checkin -checkin: $(foreach DIR,. $(REPOS),$(DIR)+checkin) - -$(eval $(call repo_targets,. $(REPOS),checkin,| %,\ - (cd % && (test -z "$$$$(git status -s -uno)" || git commit -a)))) diff --git a/rabbitmq-server/plugins-src/README b/rabbitmq-server/plugins-src/README deleted file mode 100644 index 58177d4..0000000 --- a/rabbitmq-server/plugins-src/README +++ /dev/null @@ -1 +0,0 @@ -This is the released version of rabbitmq-public-umbrella. 
You can clone the full version with: git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git diff --git a/rabbitmq-server/plugins-src/all-packages.mk b/rabbitmq-server/plugins-src/all-packages.mk deleted file mode 100644 index 1d02a3d..0000000 --- a/rabbitmq-server/plugins-src/all-packages.mk +++ /dev/null @@ -1,13 +0,0 @@ -UMBRELLA_BASE_DIR:=. - -include common.mk - -CHAIN_TESTS:=true - -# Pull in all the packages -$(foreach PACKAGE_MK,$(wildcard */package.mk),$(eval $(call do_package,$(call canonical_path,$(patsubst %/,%,$(dir $(PACKAGE_MK))))))) - -# ...and the non-integrated ones -$(foreach V,$(.VARIABLES),$(if $(filter NON_INTEGRATED_%,$(filter-out NON_INTEGRATED_DEPS_%,$V)),$(eval $(call do_package,$(subst NON_INTEGRATED_,,$V))))) - -test-all-packages: $(CHAINED_TESTS) diff --git a/rabbitmq-server/plugins-src/common.mk b/rabbitmq-server/plugins-src/common.mk deleted file mode 100644 index d8ed4f8..0000000 --- a/rabbitmq-server/plugins-src/common.mk +++ /dev/null @@ -1,143 +0,0 @@ -# Various global definitions - -# UMBRELLA_BASE_DIR should be set to the path of the -# rabbitmq-public-umbrella directory before this file is included. - -# Make version check -REQUIRED_MAKE_VERSION:=3.81 -ifneq ($(shell ( echo "$(MAKE_VERSION)" ; echo "$(REQUIRED_MAKE_VERSION)" ) | sort -t. -n | head -1),$(REQUIRED_MAKE_VERSION)) -$(error GNU make version $(REQUIRED_MAKE_VERSION) required) -endif - -# This is the standard trick for making pattern substitution work -# (amongst others) when the replacement needs to include a comma. -COMMA:=, - -# Global settings that can be overridden on the command line - -# These ones are expected to be passed down to the sub-makes invoked -# for non-integrated packages -VERSION ?= 0.0.0 -ERL ?= erl -ERL_OPTS ?= -ERLC ?= erlc -ERLC_OPTS ?= -Wall +debug_info -TMPDIR ?= /tmp - -NODENAME ?= rabbit-test -ERL_CALL ?= erl_call -ERL_CALL_OPTS ?= -sname $(NODENAME) -e - -# Where we put all the files produced when running tests. 
-TEST_TMPDIR=$(TMPDIR)/rabbitmq-test - -# Callable functions - -# Convert a package name to the corresponding erlang app name -define package_to_app_name -$(subst -,_,$(1)) -endef - -# If the variable named $(1) holds a non-empty value, return it. -# Otherwise, set the variable to $(2) and return that value. -define memoize -$(if $($(1)),$($(1)),$(eval $(1):=$(2))$(2)) -endef - -# Return a canonical form for the path in $(1) -# -# Absolute path names can be a bit verbose. This provides a way to -# canonicalize path names with more concise results. -define canonical_path -$(call memoize,SHORT_$(realpath $(1)),$(1)) -endef - -# Convert a package name to a path name -define package_to_path -$(call canonical_path,$(UMBRELLA_BASE_DIR)/$(1)) -endef - -# Produce a cp command to copy from $(1) to $(2), unless $(1) is -# empty, in which case do nothing. -# -# The optional $(3) gives a suffix to append to the command, if a -# command is produced. -define copy -$(if $(1),cp -r $(1) $(2)$(if $(3), $(3))) -endef - -# Produce the makefile fragment for the package with path in $(1), if -# it hasn't already been visited. The path should have been -# canonicalized via canonical_path. -define do_package -# Have we already visited this package? 
If so, skip it -ifndef DONE_$(1) -PACKAGE_DIR:=$(1) -include $(UMBRELLA_BASE_DIR)/do-package.mk -endif -endef - -# This is used to chain test rules, so that test-all-packages works in -# the presence of 'make -j' -define chain_test -$(if $(CHAIN_TESTS),$(CHAINED_TESTS)$(eval CHAINED_TESTS+=$(1))) -endef - -# Mark the non-integrated repos -NON_INTEGRATED_$(call package_to_path,rabbitmq-server):=true -NON_INTEGRATED_$(call package_to_path,rabbitmq-erlang-client):=true -NON_INTEGRATED_$(call package_to_path,rabbitmq-java-client):=true -NON_INTEGRATED_$(call package_to_path,rabbitmq-dotnet-client):=true -NON_INTEGRATED_DEPS_$(call package_to_path,rabbitmq-erlang-client):=rabbitmq-server - -# Where the coverage package lives -COVERAGE_PATH:=$(call package_to_path,coverage) - -# Where the rabbitmq-server package lives -RABBITMQ_SERVER_PATH=$(call package_to_path,rabbitmq-server) - -# Cleaning support -ifndef MAKECMDGOALS -TESTABLEGOALS:=$(.DEFAULT_GOAL) -else -TESTABLEGOALS:=$(MAKECMDGOALS) -endif - -# The CLEANING variable can be used to determine whether the top-level -# goal is cleaning related. In particular, it can be used to prevent -# including generated files when cleaning, which might otherwise -# trigger undesirable activity. -ifeq "$(strip $(patsubst clean%,,$(patsubst %clean,,$(TESTABLEGOALS))))" "" -CLEANING:=true -endif - -# Include a generated makefile fragment -# -# Note that this includes using "-include", and thus make will proceed -# even if an error occurs while the fragment is being re-made (we -# don't use "include" becuase it will produce a superfluous error -# message when the fragment is re-made because it doesn't exist). -# Thus you should also list the fragment as a dependency of any rules -# that will refer to the contents of the fragment. -define safe_include -ifndef CLEANING --include $(1) - -# If we fail to make the fragment, make will just loop trying to -# create it. So we have to explicitly catch that case. 
-$$(if $$(MAKE_RESTARTS),$$(if $$(wildcard $(1)),,$$(error Failed to produce $(1)))) - -endif -endef - -# This is not the make default, but it is a good idea -.DELETE_ON_ERROR: - -# Declarations for global targets -.PHONY: all-releasable copy-releasable copy-srcdist all-packages clean-all-packages -all-releasable:: -copy-releasable:: -copy-srcdist:: -all-packages:: -clean-all-packages:: -check-xref-packages:: diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/.srcdist_done b/rabbitmq-server/plugins-src/cowboy-wrapper/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0001-R12-fake-iodata-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0001-R12-fake-iodata-type.patch deleted file mode 100644 index f1d8e6a..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/0001-R12-fake-iodata-type.patch +++ /dev/null @@ -1,40 +0,0 @@ -From c2303fb756eeb8bd92dc04764970a43f59940208 Mon Sep 17 00:00:00 2001 -From: Marek Majkowski -Date: Thu, 26 Jan 2012 12:48:41 +0000 -Subject: [PATCH 1/7] R12 - Fake iodata() type - ---- - include/http.hrl | 2 +- - src/cowboy_http.erl | 3 ++- - 2 files changed, 3 insertions(+), 2 deletions(-) - -diff --git a/include/http.hrl b/include/http.hrl -index c66f2b0..c98f873 100644 ---- a/include/http.hrl -+++ b/include/http.hrl -@@ -47,7 +47,7 @@ - %% Response. - resp_state = waiting :: locked | waiting | chunks | done, - resp_headers = [] :: cowboy_http:headers(), -- resp_body = <<>> :: iodata() | {non_neg_integer(), -+ resp_body = <<>> :: cowboy_http:fake_iodata() | {non_neg_integer(), - fun(() -> {sent, non_neg_integer()})}, - - %% Functions. -diff --git a/src/cowboy_http.erl b/src/cowboy_http.erl -index 32b0ca9..95a7334 100644 ---- a/src/cowboy_http.erl -+++ b/src/cowboy_http.erl -@@ -46,7 +46,8 @@ - | 'Expires' | 'Last-Modified' | 'Accept-Ranges' | 'Set-Cookie' - | 'Set-Cookie2' | 'X-Forwarded-For' | 'Cookie' | 'Keep-Alive' - | 'Proxy-Connection' | binary(). 
---type headers() :: [{header(), iodata()}]. -+-type fake_iodata() :: iolist() | binary(). -+-type headers() :: [{header(), fake_iodata()}]. - -type status() :: non_neg_integer() | binary(). - - -export_type([method/0, uri/0, version/0, header/0, headers/0, status/0]). --- -1.7.0.4 - diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0002-R12-drop-all-references-to-boolean-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0002-R12-drop-all-references-to-boolean-type.patch deleted file mode 100644 index aaeedd6..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/0002-R12-drop-all-references-to-boolean-type.patch +++ /dev/null @@ -1,165 +0,0 @@ -From 257e64326ad786d19328d343da0ff7d29adbae4e Mon Sep 17 00:00:00 2001 -From: Marek Majkowski -Date: Thu, 26 Jan 2012 12:51:30 +0000 -Subject: [PATCH 2/7] R12 - drop all references to boolean() type - ---- - src/cowboy_cookies.erl | 8 -------- - src/cowboy_http.erl | 1 - - src/cowboy_http_protocol.erl | 3 +-- - src/cowboy_http_req.erl | 2 -- - src/cowboy_http_static.erl | 5 ----- - src/cowboy_http_websocket.erl | 2 +- - 6 files changed, 2 insertions(+), 19 deletions(-) - -diff --git a/src/cowboy_cookies.erl b/src/cowboy_cookies.erl -index 6818a86..7f5ab60 100644 ---- a/src/cowboy_cookies.erl -+++ b/src/cowboy_cookies.erl -@@ -112,7 +112,6 @@ cookie(Key, Value, Options) when is_binary(Key) - %% Internal. - - %% @doc Check if a character is a white space character. ---spec is_whitespace(char()) -> boolean(). - is_whitespace($\s) -> true; - is_whitespace($\t) -> true; - is_whitespace($\r) -> true; -@@ -120,7 +119,6 @@ is_whitespace($\n) -> true; - is_whitespace(_) -> false. - - %% @doc Check if a character is a seperator. ---spec is_separator(char()) -> boolean(). - is_separator(C) when C < 32 -> true; - is_separator($\s) -> true; - is_separator($\t) -> true; -@@ -144,7 +142,6 @@ is_separator($}) -> true; - is_separator(_) -> false. - - %% @doc Check if a binary has an ASCII seperator character. 
---spec has_seperator(binary()) -> boolean(). - has_seperator(<<>>) -> - false; - has_seperator(<<$/, Rest/binary>>) -> -@@ -228,7 +225,6 @@ read_quoted(<>, Acc) -> - read_quoted(Rest, <>). - - %% @doc Drop characters while a function returns true. ---spec binary_dropwhile(fun((char()) -> boolean()), binary()) -> binary(). - binary_dropwhile(_F, <<"">>) -> - <<"">>; - binary_dropwhile(F, String) -> -@@ -246,8 +242,6 @@ skip_whitespace(String) -> - binary_dropwhile(fun is_whitespace/1, String). - - %% @doc Split a binary when the current character causes F to return true. ---spec binary_splitwith(fun((char()) -> boolean()), binary(), binary()) -- -> {binary(), binary()}. - binary_splitwith(_F, Head, <<>>) -> - {Head, <<>>}; - binary_splitwith(F, Head, Tail) -> -@@ -260,8 +254,6 @@ binary_splitwith(F, Head, Tail) -> - end. - - %% @doc Split a binary with a function returning true or false on each char. ---spec binary_splitwith(fun((char()) -> boolean()), binary()) -- -> {binary(), binary()}. - binary_splitwith(F, String) -> - binary_splitwith(F, <<>>, String). - -diff --git a/src/cowboy_http.erl b/src/cowboy_http.erl -index 95a7334..d7261c8 100644 ---- a/src/cowboy_http.erl -+++ b/src/cowboy_http.erl -@@ -755,7 +755,6 @@ urlencode(Bin, Opts) -> - Upper = proplists:get_value(upper, Opts, false), - urlencode(Bin, <<>>, Plus, Upper). - ---spec urlencode(binary(), binary(), boolean(), boolean()) -> binary(). - urlencode(<>, Acc, P=Plus, U=Upper) -> - if C >= $0, C =< $9 -> urlencode(Rest, <>, P, U); - C >= $A, C =< $Z -> urlencode(Rest, <>, P, U); -diff --git a/src/cowboy_http_protocol.erl b/src/cowboy_http_protocol.erl -index baee081..b80745f 100644 ---- a/src/cowboy_http_protocol.erl -+++ b/src/cowboy_http_protocol.erl -@@ -55,7 +55,7 @@ - max_line_length :: integer(), - timeout :: timeout(), - buffer = <<>> :: binary(), -- hibernate = false :: boolean(), -+ hibernate = false, - loop_timeout = infinity :: timeout(), - loop_timeout_ref :: undefined | reference() - }). 
-@@ -440,7 +440,6 @@ format_header(Field) when byte_size(Field) =< 20; byte_size(Field) > 32 -> - format_header(Field) -> - format_header(Field, true, <<>>). - ---spec format_header(binary(), boolean(), binary()) -> binary(). - format_header(<<>>, _Any, Acc) -> - Acc; - %% Replicate a bug in OTP for compatibility reasons when there's a - right -diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl -index 92d96ad..d729d6c 100644 ---- a/src/cowboy_http_req.erl -+++ b/src/cowboy_http_req.erl -@@ -515,13 +515,11 @@ set_resp_body_fun(StreamLen, StreamFun, Req) -> - - - %% @doc Return whether the given header has been set for the response. ---spec has_resp_header(cowboy_http:header(), #http_req{}) -> boolean(). - has_resp_header(Name, #http_req{resp_headers=RespHeaders}) -> - NameBin = header_to_binary(Name), - lists:keymember(NameBin, 1, RespHeaders). - - %% @doc Return whether a body has been set for the response. ---spec has_resp_body(#http_req{}) -> boolean(). - has_resp_body(#http_req{resp_body={Length, _}}) -> - Length > 0; - has_resp_body(#http_req{resp_body=RespBody}) -> -diff --git a/src/cowboy_http_static.erl b/src/cowboy_http_static.erl -index 0ee996a..d370046 100644 ---- a/src/cowboy_http_static.erl -+++ b/src/cowboy_http_static.erl -@@ -207,8 +207,6 @@ allowed_methods(Req, State) -> - {['GET', 'HEAD'], Req, State}. - - %% @private ---spec malformed_request(#http_req{}, #state{}) -> -- {boolean(), #http_req{}, #state{}}. - malformed_request(Req, #state{filepath=error}=State) -> - {true, Req, State}; - malformed_request(Req, State) -> -@@ -216,8 +214,6 @@ malformed_request(Req, State) -> - - - %% @private Check if the resource exists under the document root. ---spec resource_exists(#http_req{}, #state{}) -> -- {boolean(), #http_req{}, #state{}}. 
- resource_exists(Req, #state{fileinfo={error, _}}=State) -> - {false, Req, State}; - resource_exists(Req, #state{fileinfo={ok, Fileinfo}}=State) -> -@@ -227,7 +223,6 @@ resource_exists(Req, #state{fileinfo={ok, Fileinfo}}=State) -> - %% @private - %% Access to a file resource is forbidden if it exists and the local node does - %% not have permission to read it. Directory listings are always forbidden. ---spec forbidden(#http_req{}, #state{}) -> {boolean(), #http_req{}, #state{}}. - forbidden(Req, #state{fileinfo={_, #file_info{type=directory}}}=State) -> - {true, Req, State}; - forbidden(Req, #state{fileinfo={error, eacces}}=State) -> -diff --git a/src/cowboy_http_websocket.erl b/src/cowboy_http_websocket.erl -index 0f0204c..5f59891 100644 ---- a/src/cowboy_http_websocket.erl -+++ b/src/cowboy_http_websocket.erl -@@ -54,7 +54,7 @@ - timeout = infinity :: timeout(), - timeout_ref = undefined :: undefined | reference(), - messages = undefined :: undefined | {atom(), atom(), atom()}, -- hibernate = false :: boolean(), -+ hibernate = false, - eop :: undefined | tuple(), %% hixie-76 specific. - origin = undefined :: undefined | binary() %% hixie-76 specific. - }). 
--- -1.7.0.4 - diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0003-R12-drop-all-references-to-reference-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0003-R12-drop-all-references-to-reference-type.patch deleted file mode 100644 index e0ebae9..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/0003-R12-drop-all-references-to-reference-type.patch +++ /dev/null @@ -1,55 +0,0 @@ -From 4db80ab7bacf04502ad2d29d4760e04a6d787a83 Mon Sep 17 00:00:00 2001 -From: Marek Majkowski -Date: Thu, 26 Jan 2012 12:52:23 +0000 -Subject: [PATCH 3/7] R12: drop all references to reference() type - ---- - src/cowboy_http_protocol.erl | 2 +- - src/cowboy_http_websocket.erl | 2 +- - src/cowboy_listener.erl | 2 +- - 3 files changed, 3 insertions(+), 3 deletions(-) - -diff --git a/src/cowboy_http_protocol.erl b/src/cowboy_http_protocol.erl -index b80745f..0183785 100644 ---- a/src/cowboy_http_protocol.erl -+++ b/src/cowboy_http_protocol.erl -@@ -57,7 +57,7 @@ - buffer = <<>> :: binary(), - hibernate = false, - loop_timeout = infinity :: timeout(), -- loop_timeout_ref :: undefined | reference() -+ loop_timeout_ref - }). - - %% API. -diff --git a/src/cowboy_http_websocket.erl b/src/cowboy_http_websocket.erl -index 5f59891..5100213 100644 ---- a/src/cowboy_http_websocket.erl -+++ b/src/cowboy_http_websocket.erl -@@ -52,7 +52,7 @@ - opts :: any(), - challenge = undefined :: undefined | binary() | {binary(), binary()}, - timeout = infinity :: timeout(), -- timeout_ref = undefined :: undefined | reference(), -+ timeout_ref = undefined, - messages = undefined :: undefined | {atom(), atom(), atom()}, - hibernate = false, - eop :: undefined | tuple(), %% hixie-76 specific. 
-diff --git a/src/cowboy_listener.erl b/src/cowboy_listener.erl -index c19d079..86e87f1 100644 ---- a/src/cowboy_listener.erl -+++ b/src/cowboy_listener.erl -@@ -23,8 +23,8 @@ - - -record(state, { - req_pools = [] :: [{atom(), non_neg_integer()}], -- reqs_table :: ets:tid(), -- queue = [] :: [{pid(), reference()}] -+ reqs_table, -+ queue = [] - }). - - %% API. --- -1.7.0.4 - diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0004-R12-drop-references-to-iodata-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0004-R12-drop-references-to-iodata-type.patch deleted file mode 100644 index d6f097c..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/0004-R12-drop-references-to-iodata-type.patch +++ /dev/null @@ -1,50 +0,0 @@ -From dfb750f491208a8e30cab0fa701dd866d60734b8 Mon Sep 17 00:00:00 2001 -From: Marek Majkowski -Date: Thu, 26 Jan 2012 12:53:08 +0000 -Subject: [PATCH 4/7] R12: drop references to iodata() type - ---- - src/cowboy_http_req.erl | 6 ------ - 1 files changed, 0 insertions(+), 6 deletions(-) - -diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl -index d729d6c..64e757c 100644 ---- a/src/cowboy_http_req.erl -+++ b/src/cowboy_http_req.erl -@@ -478,8 +478,6 @@ set_resp_cookie(Name, Value, Options, Req) -> - set_resp_header(HeaderName, HeaderValue, Req). - - %% @doc Add a header to the response. ---spec set_resp_header(cowboy_http:header(), iodata(), #http_req{}) -- -> {ok, #http_req{}}. - set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) -> - NameBin = header_to_binary(Name), - {ok, Req#http_req{resp_headers=[{NameBin, Value}|RespHeaders]}}. -@@ -489,7 +487,6 @@ set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) -> - %% The body set here is ignored if the response is later sent using - %% anything other than reply/2 or reply/3. The response body is expected - %% to be a binary or an iolist. ---spec set_resp_body(iodata(), #http_req{}) -> {ok, #http_req{}}. 
- set_resp_body(Body, Req) -> - {ok, Req#http_req{resp_body=Body}}. - -@@ -537,8 +534,6 @@ reply(Status, Headers, Req=#http_req{resp_body=Body}) -> - reply(Status, Headers, Body, Req). - - %% @doc Send a reply to the client. ---spec reply(cowboy_http:status(), cowboy_http:headers(), iodata(), #http_req{}) -- -> {ok, #http_req{}}. - reply(Status, Headers, Body, Req=#http_req{socket=Socket, - transport=Transport, connection=Connection, pid=ReqPid, - method=Method, resp_state=waiting, resp_headers=RespHeaders}) -> -@@ -586,7 +581,6 @@ chunked_reply(Status, Headers, Req=#http_req{socket=Socket, - %% @doc Send a chunk of data. - %% - %% A chunked reply must have been initiated before calling this function. ---spec chunk(iodata(), #http_req{}) -> ok | {error, atom()}. - chunk(_Data, #http_req{socket=_Socket, transport=_Transport, method='HEAD'}) -> - ok; - chunk(Data, #http_req{socket=Socket, transport=Transport, resp_state=chunks}) -> --- -1.7.0.4 - diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0005-R12-drop-references-to-Default-any-type.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0005-R12-drop-references-to-Default-any-type.patch deleted file mode 100644 index 5fc06fd..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/0005-R12-drop-references-to-Default-any-type.patch +++ /dev/null @@ -1,52 +0,0 @@ -From c7aef1d044a1e83fcd6be7a83b2c763c0366d4f8 Mon Sep 17 00:00:00 2001 -From: Marek Majkowski -Date: Thu, 26 Jan 2012 12:53:36 +0000 -Subject: [PATCH 5/7] R12: drop references to Default:any() type - ---- - src/cowboy_http_req.erl | 8 -------- - 1 files changed, 0 insertions(+), 8 deletions(-) - -diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl -index 64e757c..c884f5a 100644 ---- a/src/cowboy_http_req.erl -+++ b/src/cowboy_http_req.erl -@@ -147,8 +147,6 @@ qs_val(Name, Req) when is_binary(Name) -> - - %% @doc Return the query string value for the given key, or a default if - %% missing. 
---spec qs_val(binary(), #http_req{}, Default) -- -> {binary() | true | Default, #http_req{}} when Default::any(). - qs_val(Name, Req=#http_req{raw_qs=RawQs, qs_vals=undefined, - urldecode={URLDecFun, URLDecArg}}, Default) when is_binary(Name) -> - QsVals = parse_qs(RawQs, fun(Bin) -> URLDecFun(Bin, URLDecArg) end), -@@ -180,8 +178,6 @@ binding(Name, Req) when is_atom(Name) -> - - %% @doc Return the binding value for the given key obtained when matching - %% the host and path against the dispatch list, or a default if missing. ---spec binding(atom(), #http_req{}, Default) -- -> {binary() | Default, #http_req{}} when Default::any(). - binding(Name, Req, Default) when is_atom(Name) -> - case lists:keyfind(Name, 1, Req#http_req.bindings) of - {Name, Value} -> {Value, Req}; -@@ -200,8 +196,6 @@ header(Name, Req) when is_atom(Name) orelse is_binary(Name) -> - header(Name, Req, undefined). - - %% @doc Return the header value for the given key, or a default if missing. ---spec header(atom() | binary(), #http_req{}, Default) -- -> {binary() | Default, #http_req{}} when Default::any(). - header(Name, Req, Default) when is_atom(Name) orelse is_binary(Name) -> - case lists:keyfind(Name, 1, Req#http_req.headers) of - {Name, Value} -> {Value, Req}; -@@ -313,8 +307,6 @@ cookie(Name, Req) when is_binary(Name) -> - - %% @doc Return the cookie value for the given key, or a default if - %% missing. ---spec cookie(binary(), #http_req{}, Default) -- -> {binary() | true | Default, #http_req{}} when Default::any(). 
- cookie(Name, Req=#http_req{cookies=undefined}, Default) when is_binary(Name) -> - case header('Cookie', Req) of - {undefined, Req2} -> --- -1.7.0.4 - diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch deleted file mode 100644 index 183ebd2..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch +++ /dev/null @@ -1,62 +0,0 @@ -From 81106c53b80f5d0fa441b893048bbdc6c9e2c4f0 Mon Sep 17 00:00:00 2001 -From: Marek Majkowski -Date: Thu, 26 Jan 2012 12:54:31 +0000 -Subject: [PATCH 6/7] Use erlang:integer_to_list and lists:max instead of bifs - ---- - src/cowboy_http_req.erl | 2 +- - src/cowboy_http_static.erl | 2 +- - src/cowboy_multipart.erl | 4 ++-- - 3 files changed, 4 insertions(+), 4 deletions(-) - -diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl -index c884f5a..bf4ac7a 100644 ---- a/src/cowboy_http_req.erl -+++ b/src/cowboy_http_req.erl -@@ -576,7 +576,7 @@ chunked_reply(Status, Headers, Req=#http_req{socket=Socket, - chunk(_Data, #http_req{socket=_Socket, transport=_Transport, method='HEAD'}) -> - ok; - chunk(Data, #http_req{socket=Socket, transport=Transport, resp_state=chunks}) -> -- Transport:send(Socket, [integer_to_list(iolist_size(Data), 16), -+ Transport:send(Socket, [erlang:integer_to_list(iolist_size(Data), 16), - <<"\r\n">>, Data, <<"\r\n">>]). - - %% @doc Send an upgrade reply. -diff --git a/src/cowboy_http_static.erl b/src/cowboy_http_static.erl -index d370046..da3bd33 100644 ---- a/src/cowboy_http_static.erl -+++ b/src/cowboy_http_static.erl -@@ -412,7 +412,7 @@ attr_etag_function(Args, Attrs) -> - - -spec attr_etag_function([etagarg()], [fileattr()], [binary()]) -> binary(). 
- attr_etag_function(_Args, [], Acc) -> -- list_to_binary(integer_to_list(erlang:crc32(Acc), 16)); -+ list_to_binary(erlang:integer_to_list(erlang:crc32(Acc), 16)); - attr_etag_function(Args, [H|T], Acc) -> - {_, Value} = lists:keyfind(H, 1, Args), - attr_etag_function(Args, T, [term_to_binary(Value)|Acc]). -diff --git a/src/cowboy_multipart.erl b/src/cowboy_multipart.erl -index b7aeb54..c9b5b6c 100644 ---- a/src/cowboy_multipart.erl -+++ b/src/cowboy_multipart.erl -@@ -105,7 +105,7 @@ parse_boundary_eol(Bin, Pattern) -> - cowboy_http:whitespace(Rest, Fun); - nomatch -> - % CRLF not found in the given binary. -- RestStart = max(byte_size(Bin) - 1, 0), -+ RestStart = lists:max([byte_size(Bin) - 1, 0]), - <<_:RestStart/binary, Rest/binary>> = Bin, - more(Rest, fun (NewBin) -> parse_boundary_eol(NewBin, Pattern) end) - end. -@@ -175,7 +175,7 @@ skip(Bin, Pattern = {P, PSize}) -> - parse_boundary_tail(Rest, Pattern); - nomatch -> - % Boundary not found, need more data. -- RestStart = max(byte_size(Bin) - PSize + 1, 0), -+ RestStart = lists:max([byte_size(Bin) - PSize + 1, 0]), - <<_:RestStart/binary, Rest/binary>> = Bin, - more(Rest, fun (NewBin) -> skip(NewBin, Pattern) end) - end. 
--- -1.7.0.4 - diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0007-R12-type-definitions-must-be-ordered.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0007-R12-type-definitions-must-be-ordered.patch deleted file mode 100644 index 1b1f3de..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/0007-R12-type-definitions-must-be-ordered.patch +++ /dev/null @@ -1,37 +0,0 @@ -From 547731d5490b36f1239a99e6c4acc1964e724a6e Mon Sep 17 00:00:00 2001 -From: Marek Majkowski -Date: Thu, 26 Jan 2012 12:54:49 +0000 -Subject: [PATCH 7/7] R12 - type definitions must be ordered - ---- - src/cowboy_multipart.erl | 10 +++++----- - 1 files changed, 5 insertions(+), 5 deletions(-) - -diff --git a/src/cowboy_multipart.erl b/src/cowboy_multipart.erl -index c9b5b6c..0bd123a 100644 ---- a/src/cowboy_multipart.erl -+++ b/src/cowboy_multipart.erl -@@ -15,15 +15,15 @@ - %% @doc Multipart parser. - -module(cowboy_multipart). - ---type part_parser() :: parser(more(part_result())). -+-type part_parser() :: any(). - -type parser(T) :: fun((binary()) -> T). - -type more(T) :: T | {more, parser(T)}. ---type part_result() :: headers() | eof. ---type headers() :: {headers, http_headers(), body_cont()}. -+-type part_result() :: any(). -+-type headers() :: any(). - -type http_headers() :: [{atom() | binary(), binary()}]. ---type body_cont() :: cont(more(body_result())). -+-type body_cont() :: any(). - -type cont(T) :: fun(() -> T). ---type body_result() :: {body, binary(), body_cont()} | end_of_part(). -+-type body_result() :: any(). - -type end_of_part() :: {end_of_part, cont(more(part_result()))}. - -type disposition() :: {binary(), [{binary(), binary()}]}. 
- --- -1.7.0.4 - diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/0008-sec-websocket-protocol.patch b/rabbitmq-server/plugins-src/cowboy-wrapper/0008-sec-websocket-protocol.patch deleted file mode 100644 index 494c6b8..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/0008-sec-websocket-protocol.patch +++ /dev/null @@ -1,16 +0,0 @@ -diff --git a/src/cowboy_http_req.erl b/src/cowboy_http_req.erl -index 92d96ad..dd772df 100644 ---- a/src/cowboy_http_req.erl -+++ b/src/cowboy_http_req.erl -@@ -288,6 +282,11 @@ parse_header(Name, Req, Default) when Name =:= 'Upgrade' -> - fun (Value) -> - cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2) - end); -+parse_header(Name, Req, Default) when Name =:= <<"sec-websocket-protocol">> -> -+ parse_header(Name, Req, Default, -+ fun (Value) -> -+ cowboy_http:nonempty_list(Value, fun cowboy_http:token/2) -+ end); - parse_header(Name, Req, Default) -> - {Value, Req2} = header(Name, Req, Default), - {undefined, Value, Req2}. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/Makefile b/rabbitmq-server/plugins-src/cowboy-wrapper/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/README.md b/rabbitmq-server/plugins-src/cowboy-wrapper/README.md deleted file mode 100644 index e1f1d5e..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/README.md +++ /dev/null @@ -1 +0,0 @@ -Cowboy requires R14 diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.done b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.travis.yml b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.travis.yml deleted file mode 100644 index f04becf..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/.travis.yml +++ 
/dev/null @@ -1,7 +0,0 @@ -language: erlang -otp_release: - - R15B - - R14B04 - - R14B03 - - R14B02 -script: "make tests" diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/AUTHORS b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/AUTHORS deleted file mode 100644 index a07a69d..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/AUTHORS +++ /dev/null @@ -1,18 +0,0 @@ -Cowboy is available thanks to the work of: - -Loïc Hoguin -Anthony Ramine -Magnus Klaar -Paul Oliver -Steven Gravell -Tom Burdick -Hunter Morris -Yurii Rashkovskii -Ali Sabil -Hans Ulrich Niedermann -Jesper Louis Andersen -Mathieu Lecarme -Max Lapshin -Michiel Hakvoort -Ori Bar -Alisdair Sullivan diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/CHANGELOG.md b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/CHANGELOG.md deleted file mode 100644 index a4b815b..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/CHANGELOG.md +++ /dev/null @@ -1,213 +0,0 @@ -CHANGELOG -========= - -0.4.0 ------ - -* Set the cowboy_listener process priority to high - - As it is the central process used by all incoming requests - we need to set its priority to high to avoid timeouts that - would happen otherwise when reaching a huge number of - concurrent requests. - -* Add cowboy:child_spec/6 for embedding in other applications - -* Add cowboy_http_rest, an experimental REST protocol support - - Based on the Webmachine diagram and documentation. It is a - new implementation, not a port, therefore a few changes have - been made. However all the callback names are the same and - should behave similarly to Webmachine. - - There is currently no documentation other than the Webmachine - resource documentation and the comments found in cowboy_http_rest, - which itself should be fairly easy to read and understand. 
- -* Add cowboy_http_static, an experimental static file handler - - Makes use of the aforementioned REST protocol support to - deliver files with proper content type and cache headers. - - Note that this uses the new file:sendfile support when - appropriate, which currently requires the VM to be started - with the +A option defined, else errors may randomly appear. - -* Add cowboy_bstr module for binary strings related functions - -* Add cowboy_http module for HTTP parsing functions - - This module so far contains various functions for HTTP header - parsing along with URL encoding and decoding. - -* Remove quoted from the default dependencies - - This should make Cowboy much easier to compile and use by default. - It is of course still possible to use quoted as your URL decoding - library in Cowboy thanks to the newly added urldecode option. - -* Fix supervisor spec for non dynamic modules to allow upgrades to complete - -* Add cowboy:accept_ack/1 for a cleaner handling of the shoot message - - Before, when the listener accepted a connection, the newly created - process was waiting for a message containing the atom 'shoot' before - proceeding. This has been replaced by the cowboy:accept_ack/1 function. - - This function should be used where 'shoot' was received because the - contents of the message have changed (and could change again in the - distant future). - -* Update binary parsing expressions to avoid hype crashes - - More specifically, /bits was replaced by /binary. - -* Rename the type cowboy_dispatcher:path_tokens/0 to tokens/0 - -* Remove the cowboy_clock:date/0, time/0 and datetime/0 types - - The calendar module exports those same types properly since R14B04. 
- -* Add cacertfile configuration option to cowboy_ssl_transport - -* Add cowboy_protocol behaviour - -* Remove -Wbehaviours dialyzer option unavailable in R15B - -* Many tests and specs improvements - -### cowboy_http_req - -* Fix a crash when reading the request body - -* Add parse_header/2 and parse_header/3 - - The following headers can now be semantically parsed: Connection, Accept, - Accept-Charset, Accept-Encoding, Accept-Language, Content-Length, - Content-Type, If-Match, If-None-Match, If-Modified-Since, - If-Unmodified-Since, Upgrade - -* Add set_resp_header/3, set_resp_cookie/4 and set_resp_body/2 - - These functions allow handlers to set response headers and body - without having to reply directly. - -* Add set_resp_body_fun/3 - - This function allows handlers to stream the body of the response - using the given fun. The size of the response must be known beforehand. - -* Add transport/1 to obtain the transport and socket for the request - - This allows handlers to have low-level socket access in those cases - where they do need it, like when streaming a response body with - set_resp_body_fun/3. - -* Add peer_addr/1 - - This function tries to guess the real peer IP based on the HTTP - headers received. - -* Add meta/2 and meta/3 to save useful protocol information - - Currently used to save the Websocket protocol version currently used, - and to save request information in the REST protocol handler. - -* Add reply/2 and reply/3 aliases to reply/4 - -* Add upgrade_reply/3 for protocol upgrades - -### cowboy_http_protocol - -* Add the {urldecode, fun urldecode/2} option - - Added when quoted was removed from the default build. Can be used to - tell Cowboy to use quoted or any other URL decoding routine. - -* Add the max_keepalive option - -* Add the max_line_length option - -* Allow HTTP handlers to stop during init/3 - - To do so they can return {shutdown, Req, State}. 
- -* Add loops support in HTTP handlers for proper long-polling support - - A loop can be entered by returning either of {loop, Req, State}, - {loop, Req, State, hibernate}, {loop, Req, State, Timeout} or - {loop, Req, State, Timeout, hibernate} from init/3. - - Loops are useful when we cannot reply immediately and instead - are waiting for an Erlang message to be able to complete the request, - as would typically be done for long-polling. - - Loop support in the protocol means that timeouts and hibernating - are well tested and handled so you can use those options without - worrying. It is recommended to set the timeout option. - - When a loop is started, handle/2 will never be called so it does - not need to be defined. When the request process receives an Erlang - message, it will call the info/3 function with the message as the - first argument. - - Like in OTP, you do need to set timeout and hibernate again when - returning from info/3 to enable them until the next call. - -* Fix the sending of 500 errors when handlers crash - - Now we send an error response when no response has been sent, - and do nothing more than close the connection if anything - did get sent. - -* Fix a crash when the server is sent HTTP responses - -* Fix HTTP timeouts handling when the Request-Line wasn't received - -* Fix the handling of the max number of empty lines between requests - -* Fix the handling of HEAD requests - -* Fix HTTP/1.0 Host header handling - -* Reply status 400 if we receive an unexpected value or error for headers - -* Properly close when the application sends "Connection: close" header - -* Close HTTP connections on all errors - -* Improve the error message for HTTP handlers - -### cowboy_http_websocket - -* Add websocket support for all versions up to RFC 6455 - - Support isn't perfect yet according to the specifications, but - is working against all currently known client implementations. 
- -* Allow websocket_init/3 to return with the hibernate option set - -* Add {shutdown, Req} return value to websocket_init/3 to fail an upgrade - -* Fix websocket timeout handling - -* Fix error messages: wrong callback name was reported on error - -* Fix byte-by-byte websocket handling - -* Fix an issue when using hixie-76 with certain proxies - -* Fix a crash in the hixie-76 handshake - -* Fix the handshake when SSL is used on port 443 - -* Fix a crash in the handshake when cowboy_http_req:compact/1 is used - -* Fix handshake when a query string is present - -* Fix a crash when the Upgrade header contains more than one token - -0.2.0 ------ - -* Initial release. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/Makefile b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/Makefile deleted file mode 100644 index e5524f4..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -# See LICENSE for licensing information. - -DIALYZER = dialyzer -REBAR = rebar - -all: app - -app: deps - @$(REBAR) compile - -deps: - @$(REBAR) get-deps - -clean: - @$(REBAR) clean - rm -f test/*.beam - rm -f erl_crash.dump - -tests: clean app eunit ct - -eunit: - @$(REBAR) eunit skip_deps=true - -ct: - @$(REBAR) ct skip_deps=true - -build-plt: - @$(DIALYZER) --build_plt --output_plt .cowboy_dialyzer.plt \ - --apps kernel stdlib sasl inets crypto public_key ssl - -dialyze: - @$(DIALYZER) --src src --plt .cowboy_dialyzer.plt -Werror_handling \ - -Wrace_conditions -Wunmatched_returns # -Wunderspecs - -docs: - @$(REBAR) doc skip_deps=true diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/README.md b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/README.md deleted file mode 100644 index d5950b9..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/README.md +++ /dev/null @@ -1,290 +0,0 @@ -Cowboy -====== - -Cowboy is a small, fast and modular HTTP server written in Erlang. 
- -Cowboy is also a socket acceptor pool, able to accept connections -for any kind of TCP protocol. - -Goals ------ - -Cowboy aims to provide the following advantages: - -* **Small** code base. -* Damn **fast**. -* **Modular**: transport and protocol handlers are replaceable. -* **Binary HTTP** for greater speed and lower memory usage. -* Easy to **embed** inside another application. -* Selectively **dispatch** requests to handlers, allowing you to send some - requests to your embedded code and others to a FastCGI application in - PHP or Ruby. -* No parameterized module. No process dictionary. **Clean** Erlang code. - -The server is currently in early development. Comments and suggestions are -more than welcome. To contribute, either open bug reports, or fork the project -and send us pull requests with new or improved functionality. You should -discuss your plans with us before doing any serious work, though, to avoid -duplicating efforts. - -Quick start ------------ - -* Add Cowboy as a rebar or agner dependency to your application. -* Start Cowboy and add one or more listeners. -* Write handlers for your application. -* Check out [examples](https://github.com/extend/cowboy_examples)! - -Getting Started ---------------- - -At heart, Cowboy is nothing more than an TCP acceptor pool. All it does is -accept connections received on a given port and using a given transport, -like TCP or SSL, and forward them to a request handler for the given -protocol. Acceptors and request handlers are of course supervised -automatically. - -It just so happens that Cowboy also includes an HTTP protocol handler. -But Cowboy does nothing by default. You need to explicitly ask Cowboy -to listen on a port with your chosen transport and protocol handlers. -To do so, you must start a listener. - -A listener is a special kind of supervisor that manages both the -acceptor pool and the request processes. It is named and can thus be -started and stopped at will. 
- -An acceptor pool is a pool of processes whose only role is to accept -new connections. It's good practice to have many of these processes -as they are very cheap and allow much quicker response when you get -many connections. Of course, as with everything else, you should -**benchmark** before you decide what's best for you. - -Cowboy includes a TCP transport handler for HTTP and an SSL transport -handler for HTTPS. The transport handlers can of course be reused for -other protocols like FTP or IRC. - -The HTTP protocol requires one last thing to continue: dispatching rules. -Don't worry about it right now though and continue reading, it'll all -be explained. - -You can start and stop listeners by calling `cowboy:start_listener/6` and -`cowboy:stop_listener/1` respectively. - -The following example demonstrates the startup of a very simple listener. - -``` erlang -application:start(cowboy), -Dispatch = [ - %% {Host, list({Path, Handler, Opts})} - {'_', [{'_', my_handler, []}]} -], -%% Name, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts -cowboy:start_listener(my_http_listener, 100, - cowboy_tcp_transport, [{port, 8080}], - cowboy_http_protocol, [{dispatch, Dispatch}] -). -``` - -This is not enough though, you must also write the my_handler module -to process the incoming HTTP requests. Of course Cowboy comes with -predefined handlers for specific tasks but most of the time you'll -want to write your own handlers for your application. - -Following is an example of a "Hello World!" HTTP handler. - -``` erlang --module(my_handler). --export([init/3, handle/2, terminate/2]). - -init({tcp, http}, Req, Opts) -> - {ok, Req, undefined_state}. - -handle(Req, State) -> - {ok, Req2} = cowboy_http_req:reply(200, [], <<"Hello World!">>, Req), - {ok, Req2, State}. - -terminate(Req, State) -> - ok. -``` - -You can also write handlers that do not reply directly. 
Instead, such handlers -will wait for an Erlang message from another process and only reply when -receiving such message, or timeout if it didn't arrive in time. - -This is especially useful for long-polling functionality, as Cowboy will handle -process hibernation and timeouts properly, preventing mistakes if you were to -write the code yourself. An handler of that kind can be defined like this: - -``` erlang --module(my_loop_handler). --export([init/3, info/3, terminate/2]). - --define(TIMEOUT, 60000). - -init({tcp, http}, Req, Opts) -> - {loop, Req, undefined_state, ?TIMEOUT, hibernate}. - -info({reply, Body}, Req, State) -> - {ok, Req2} = cowboy_http_req:reply(200, [], Body, Req), - {ok, Req2, State}; -info(Message, Req, State) -> - {loop, Req, State, hibernate}. - -terminate(Req, State) -> - ok. -``` - -It is of course possible to combine both type of handlers together as long as -you return the proper tuple from init/3. - -**Note**: versions prior to `0.4.0` used the -[quoted](https://github.com/klaar/quoted.erl) library instead of the built in -`cowboy_http:urldecode/2` function. If you want to retain this you must add it -as a dependency to your application and add the following cowboy_http_protocol -option: - -``` erlang - {urldecode, {fun quoted:from_url/2, quoted:make([])}} -``` - -Continue reading to learn how to dispatch rules and handle requests. - -Dispatch rules --------------- - -Cowboy allows you to dispatch HTTP requests directly to a specific handler -based on the hostname and path information from the request. It also lets -you define static options for the handler directly in the rules. - -To match the hostname and path, Cowboy requires a list of tokens. For -example, to match the "dev-extend.eu" domain name, you must specify -`[<<"dev-extend">>, <<"eu">>]`. Or, to match the "/path/to/my/resource" -you must use `[<<"path">>, <<"to">>, <<"my">>, <<"resource">>]`. All the -tokens must be given as binary. 
- -You can use the special token `'_'` (the atom underscore) to indicate that -you accept anything in that position. For example if you have both -"dev-extend.eu" and "dev-extend.fr" domains, you can use the match spec -`[<<"dev-extend">>, '_']` to match any top level extension. - -Finally, you can also match multiple leading segments of the domain name and -multiple trailing segments of the request path using the atom `'...'` (the atom -ellipsis) respectively as the first host token or the last path token. For -example, host rule `['...', <<"dev-extend">>, <<"eu">>]` can match both -"cowboy.bugs.dev-extend.eu" and "dev-extend.eu" and path rule -`[<<"projects">>, '...']` can match both "/projects" and -"/projects/cowboy/issues/42". The host leading segments and the path trailing -segments can later be retrieved through `cowboy_http_req:host_info/1` and -`cowboy_http_req:path_info/1`. - -Any other atom used as a token will bind the value to this atom when -matching. To follow on our hostnames example, `[<<"dev-extend">>, ext]` -would bind the values `<<"eu">>` and `<<"fr">>` to the ext atom, that you -can later retrieve in your handler by calling `cowboy_http_req:binding/{2,3}`. - -You can also accept any match spec by using the atom `'_'` directly instead of -a list of tokens. Our hello world example above uses this to forward all -requests to a single handler. - -There is currently no way to match multiple tokens at once. - -Requests handling ------------------ - -Requests are passed around in the Request variable. Although they are -defined as a record, it is recommended to access them only through the -cowboy_http_req module API. - -You can retrieve the HTTP method, HTTP version, peer address and port, -host tokens, raw host, used port, path tokens, raw path, query string -values, bound values from the dispatch step, header values from the -request. You can also read the request body, if any, optionally parsing -it as a query string. 
Finally, the request allows you to send a response -to the client. - -See the cowboy_http_req module for more information. - -Websockets ----------- - -The Websocket protocol is built upon the HTTP protocol. It first sends -an HTTP request for an handshake, performs it and then switches -to Websocket. Therefore you need to write a standard HTTP handler to -confirm the handshake should be completed and then the Websocket-specific -callbacks. - -A simple handler doing nothing but sending a repetitive message using -Websocket would look like this: - -``` erlang --module(my_ws_handler). --export([init/3]). --export([websocket_init/3, websocket_handle/3, - websocket_info/3, websocket_terminate/3]). - -init({tcp, http}, Req, Opts) -> - {upgrade, protocol, cowboy_http_websocket}. - -websocket_init(TransportName, Req, _Opts) -> - erlang:start_timer(1000, self(), <<"Hello!">>), - {ok, Req, undefined_state}. - -websocket_handle({text, Msg}, Req, State) -> - {reply, {text, << "That's what she said! ", Msg/binary >>}, Req, State}; -websocket_handle(_Data, Req, State) -> - {ok, Req, State}. - -websocket_info({timeout, _Ref, Msg}, Req, State) -> - erlang:start_timer(1000, self(), <<"How' you doin'?">>), - {reply, {text, Msg}, Req, State}; -websocket_info(_Info, Req, State) -> - {ok, Req, State}. - -websocket_terminate(_Reason, _Req, _State) -> - ok. -``` - -Of course you can have an HTTP handler doing both HTTP and Websocket -handling, but for the sake of this example we're ignoring the HTTP -part entirely. - -As the Websocket protocol is still a draft the API is subject to change -regularly when support to the most recent drafts gets added. Features may -be added, changed or removed before the protocol gets finalized. Cowboy -tries to implement all drafts transparently and give a single interface to -handle them all, however. 
- -Using Cowboy with other protocols ---------------------------------- - -One of the strengths of Cowboy is of course that you can use it with any -protocol you want. The only downside is that if it's not HTTP, you'll -probably have to write the protocol handler yourself. - -The only exported function a protocol handler needs is the start_link/4 -function, with arguments ListenerPid, Socket, Transport and Opts. ListenerPid -is the pid to the listener's gen_server, managing the connections. Socket is of -course the client socket; Transport is the module name of the chosen transport -handler and Opts is protocol options defined when starting the listener. - -After initializing your protocol, it is recommended to call the -function cowboy:accept_ack/1 with the ListenerPid as argument, -as it will ensure Cowboy has been able to fully initialize the socket. -Anything you do past this point is up to you! - -If you need to change some socket options, like enabling raw mode for example, -you can call the Transport:setopts/2 function. It is the protocol's -responsability to manage the socket usage, there should be no need for an user -to specify that kind of options while starting a listener. - -You should definitely look at the cowboy_http_protocol module for a great -example of fast request handling if you need to. Otherwise it's probably -safe to use `{active, once}` mode and handle everything as it comes. - -Note that while you technically can run a protocol handler directly as a -gen_server or a gen_fsm, it's probably not a good idea, as the only call -you'll ever receive from Cowboy is the start_link/4 call. On the other -hand, feel free to write a very basic protocol handler which then forwards -requests to a gen_server or gen_fsm. By doing so however you must take -care to supervise their processes as Cowboy only knows about the protocol -handler itself. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/doc/overview.edoc b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/doc/overview.edoc deleted file mode 100644 index 56648c4..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/doc/overview.edoc +++ /dev/null @@ -1,4 +0,0 @@ -@author Loïc Hoguin -@copyright 2011 Loïc Hoguin -@version HEAD -@title Small, fast, modular HTTP server. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/include/http.hrl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/include/http.hrl deleted file mode 100644 index c98f873..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/include/http.hrl +++ /dev/null @@ -1,55 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% Copyright (c) 2011, Anthony Ramine -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - --record(http_req, { - %% Transport. - socket = undefined :: undefined | inet:socket(), - transport = undefined :: undefined | module(), - connection = keepalive :: keepalive | close, - - %% Request. 
- pid = undefined :: pid(), - method = 'GET' :: cowboy_http:method(), - version = {1, 1} :: cowboy_http:version(), - peer = undefined :: undefined | {inet:ip_address(), inet:ip_port()}, - host = undefined :: undefined | cowboy_dispatcher:tokens(), - host_info = undefined :: undefined | cowboy_dispatcher:tokens(), - raw_host = undefined :: undefined | binary(), - port = undefined :: undefined | inet:ip_port(), - path = undefined :: undefined | '*' | cowboy_dispatcher:tokens(), - path_info = undefined :: undefined | cowboy_dispatcher:tokens(), - raw_path = undefined :: undefined | binary(), - qs_vals = undefined :: undefined | list({binary(), binary() | true}), - raw_qs = undefined :: undefined | binary(), - bindings = undefined :: undefined | cowboy_dispatcher:bindings(), - headers = [] :: cowboy_http:headers(), - p_headers = [] :: [any()], %% @todo Improve those specs. - cookies = undefined :: undefined | [{binary(), binary()}], - meta = [] :: [{atom(), any()}], - - %% Request body. - body_state = waiting :: waiting | done | - {multipart, non_neg_integer(), fun()}, - buffer = <<>> :: binary(), - - %% Response. - resp_state = waiting :: locked | waiting | chunks | done, - resp_headers = [] :: cowboy_http:headers(), - resp_body = <<>> :: cowboy_http:fake_iodata() | {non_neg_integer(), - fun(() -> {sent, non_neg_integer()})}, - - %% Functions. - urldecode :: {fun((binary(), T) -> binary()), T} -}). diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/rebar.config b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/rebar.config deleted file mode 100644 index 82d1fca..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/rebar.config +++ /dev/null @@ -1,12 +0,0 @@ -{cover_enabled, true}. -{deps, [ - {proper, "1.0", - {git, "git://github.com/manopapad/proper.git", {tag, "v1.0"}}} -]}. -{eunit_opts, [verbose, {report, {eunit_surefire, [{dir, "."}]}}]}. 
-{erl_opts, [ -%% bin_opt_info, -%% warn_missing_spec, - warnings_as_errors, - warn_export_all -]}. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.erl deleted file mode 100644 index 6defeea..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy.erl +++ /dev/null @@ -1,85 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc Cowboy API to start and stop listeners. --module(cowboy). - --export([start_listener/6, stop_listener/1, child_spec/6, accept_ack/1]). - -%% @doc Start a listener for the given transport and protocol. -%% -%% A listener is effectively a pool of NbAcceptors acceptors. -%% Acceptors accept connections on the given Transport and forward -%% requests to the given Protocol handler. Both transport and protocol -%% modules can be given options through the TransOpts and the -%% ProtoOpts arguments. Available options are documented in the -%% listen transport function and in the protocol module of your choice. -%% -%% All acceptor and request processes are supervised by the listener. -%% -%% It is recommended to set a large enough number of acceptors to improve -%% performance. 
The exact number depends of course on your hardware, on the -%% protocol used and on the number of expected simultaneous connections. -%% -%% The Transport option max_connections allows you to define -%% the maximum number of simultaneous connections for this listener. It defaults -%% to 1024. See cowboy_listener for more details on limiting the number -%% of connections. -%% -%% Although Cowboy includes a cowboy_http_protocol handler, other -%% handlers can be created for different protocols like IRC, FTP and more. -%% -%% Ref can be used to stop the listener later on. --spec start_listener(any(), non_neg_integer(), module(), any(), module(), any()) - -> {ok, pid()}. -start_listener(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts) - when is_integer(NbAcceptors) andalso is_atom(Transport) - andalso is_atom(Protocol) -> - supervisor:start_child(cowboy_sup, child_spec(Ref, NbAcceptors, - Transport, TransOpts, Protocol, ProtoOpts)). - -%% @doc Stop a listener identified by Ref. -%% @todo Currently request processes aren't terminated with the listener. --spec stop_listener(any()) -> ok | {error, not_found}. -stop_listener(Ref) -> - case supervisor:terminate_child(cowboy_sup, {cowboy_listener_sup, Ref}) of - ok -> - supervisor:delete_child(cowboy_sup, {cowboy_listener_sup, Ref}); - {error, Reason} -> - {error, Reason} - end. - -%% @doc Return a child spec suitable for embedding. -%% -%% When you want to embed cowboy in another application, you can use this -%% function to create a ChildSpec suitable for use in a supervisor. -%% The parameters are the same as in start_listener/6 but rather -%% than hooking the listener to the cowboy internal supervisor, it just returns -%% the spec. --spec child_spec(any(), non_neg_integer(), module(), any(), module(), any()) - -> supervisor:child_spec(). 
-child_spec(Ref, NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts) - when is_integer(NbAcceptors) andalso is_atom(Transport) - andalso is_atom(Protocol) -> - {{cowboy_listener_sup, Ref}, {cowboy_listener_sup, start_link, [ - NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts - ]}, permanent, 5000, supervisor, [cowboy_listener_sup]}. - -%% @doc Acknowledge the accepted connection. -%% -%% Effectively used to make sure the socket control has been given to -%% the protocol process before starting to use it. --spec accept_ack(pid()) -> ok. -accept_ack(ListenerPid) -> - receive {shoot, ListenerPid} -> ok end. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptor.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptor.erl deleted file mode 100644 index 4cb9fa7..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptor.erl +++ /dev/null @@ -1,59 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @private --module(cowboy_acceptor). - --export([start_link/7]). %% API. --export([acceptor/7]). %% Internal. - -%% API. - --spec start_link(inet:socket(), module(), module(), any(), - non_neg_integer(), pid(), pid()) -> {ok, pid()}. 
-start_link(LSocket, Transport, Protocol, Opts, - MaxConns, ListenerPid, ReqsSup) -> - Pid = spawn_link(?MODULE, acceptor, - [LSocket, Transport, Protocol, Opts, MaxConns, ListenerPid, ReqsSup]), - {ok, Pid}. - -%% Internal. - --spec acceptor(inet:socket(), module(), module(), any(), - non_neg_integer(), pid(), pid()) -> no_return(). -acceptor(LSocket, Transport, Protocol, Opts, MaxConns, ListenerPid, ReqsSup) -> - case Transport:accept(LSocket, 2000) of - {ok, CSocket} -> - {ok, Pid} = supervisor:start_child(ReqsSup, - [ListenerPid, CSocket, Transport, Protocol, Opts]), - Transport:controlling_process(CSocket, Pid), - {ok, NbConns} = cowboy_listener:add_connection(ListenerPid, - default, Pid), - Pid ! {shoot, ListenerPid}, - limit_reqs(ListenerPid, NbConns, MaxConns); - {error, timeout} -> - ignore; - {error, _Reason} -> - %% @todo Probably do something here. If the socket was closed, - %% we may want to try and listen again on the port? - ignore - end, - ?MODULE:acceptor(LSocket, Transport, Protocol, Opts, - MaxConns, ListenerPid, ReqsSup). - --spec limit_reqs(pid(), non_neg_integer(), non_neg_integer()) -> ok. -limit_reqs(_ListenerPid, NbConns, MaxConns) when NbConns =< MaxConns -> - ok; -limit_reqs(ListenerPid, _NbConns, MaxConns) -> - cowboy_listener:wait(ListenerPid, default, MaxConns). diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptors_sup.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptors_sup.erl deleted file mode 100644 index 17849a6..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_acceptors_sup.erl +++ /dev/null @@ -1,43 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. 
-%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @private --module(cowboy_acceptors_sup). --behaviour(supervisor). - --export([start_link/7]). %% API. --export([init/1]). %% supervisor. - -%% API. - --spec start_link(non_neg_integer(), module(), any(), - module(), any(), pid(), pid()) -> {ok, pid()}. -start_link(NbAcceptors, Transport, TransOpts, - Protocol, ProtoOpts, ListenerPid, ReqsPid) -> - supervisor:start_link(?MODULE, [NbAcceptors, Transport, TransOpts, - Protocol, ProtoOpts, ListenerPid, ReqsPid]). - -%% supervisor. - --spec init(list()) -> {ok, {{one_for_one, 10, 10}, list()}}. -init([NbAcceptors, Transport, TransOpts, - Protocol, ProtoOpts, ListenerPid, ReqsPid]) -> - {ok, LSocket} = Transport:listen(TransOpts), - MaxConns = proplists:get_value(max_connections, TransOpts, 1024), - Procs = [{{acceptor, self(), N}, {cowboy_acceptor, start_link, [ - LSocket, Transport, Protocol, ProtoOpts, - MaxConns, ListenerPid, ReqsPid - ]}, permanent, brutal_kill, worker, []} - || N <- lists:seq(1, NbAcceptors)], - {ok, {{one_for_one, 10, 10}, Procs}}. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_cookies.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_cookies.erl deleted file mode 100644 index 7f5ab60..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_cookies.erl +++ /dev/null @@ -1,392 +0,0 @@ -%% Copyright 2007 Mochi Media, Inc. 
-%% Copyright 2011 Thomas Burdick -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc HTTP Cookie parsing and generating (RFC 2965). - --module(cowboy_cookies). - --export([parse_cookie/1, cookie/3, cookie/2]). %% API. - -%% Types. --type kv() :: {Name::binary(), Value::binary()}. --type kvlist() :: [kv()]. --type cookie_option() :: {max_age, integer()} - | {local_time, calendar:datetime()} - | {domain, binary()} | {path, binary()} - | {secure, true | false} | {http_only, true | false}. --export_type([kv/0, kvlist/0, cookie_option/0]). - --define(QUOTE, $\"). - --include_lib("eunit/include/eunit.hrl"). - -%% API. - -%% @doc Parse the contents of a Cookie header field, ignoring cookie -%% attributes, and return a simple property list. --spec parse_cookie(binary()) -> kvlist(). -parse_cookie(<<>>) -> - []; -parse_cookie(Cookie) when is_binary(Cookie) -> - parse_cookie(Cookie, []). - -%% @equiv cookie(Key, Value, []) --spec cookie(binary(), binary()) -> kv(). -cookie(Key, Value) when is_binary(Key) andalso is_binary(Value) -> - cookie(Key, Value, []). - -%% @doc Generate a Set-Cookie header field tuple. --spec cookie(binary(), binary(), [cookie_option()]) -> kv(). 
-cookie(Key, Value, Options) when is_binary(Key) - andalso is_binary(Value) andalso is_list(Options) -> - Cookie = <<(any_to_binary(Key))/binary, "=", - (quote(Value))/binary, "; Version=1">>, - %% Set-Cookie: - %% Comment, Domain, Max-Age, Path, Secure, Version - ExpiresPart = - case proplists:get_value(max_age, Options) of - undefined -> - <<"">>; - RawAge -> - When = case proplists:get_value(local_time, Options) of - undefined -> - calendar:local_time(); - LocalTime -> - LocalTime - end, - Age = case RawAge < 0 of - true -> - 0; - false -> - RawAge - end, - AgeBinary = quote(Age), - CookieDate = age_to_cookie_date(Age, When), - <<"; Expires=", CookieDate/binary, - "; Max-Age=", AgeBinary/binary>> - end, - SecurePart = - case proplists:get_value(secure, Options) of - true -> - <<"; Secure">>; - _ -> - <<"">> - end, - DomainPart = - case proplists:get_value(domain, Options) of - undefined -> - <<"">>; - Domain -> - <<"; Domain=", (quote(Domain))/binary>> - end, - PathPart = - case proplists:get_value(path, Options) of - undefined -> - <<"">>; - Path -> - <<"; Path=", (quote(Path))/binary>> - end, - HttpOnlyPart = - case proplists:get_value(http_only, Options) of - true -> - <<"; HttpOnly">>; - _ -> - <<"">> - end, - CookieParts = <>, - {<<"Set-Cookie">>, CookieParts}. - -%% Internal. - -%% @doc Check if a character is a white space character. -is_whitespace($\s) -> true; -is_whitespace($\t) -> true; -is_whitespace($\r) -> true; -is_whitespace($\n) -> true; -is_whitespace(_) -> false. - -%% @doc Check if a character is a seperator. 
-is_separator(C) when C < 32 -> true; -is_separator($\s) -> true; -is_separator($\t) -> true; -is_separator($() -> true; -is_separator($)) -> true; -is_separator($<) -> true; -is_separator($>) -> true; -is_separator($@) -> true; -is_separator($,) -> true; -is_separator($;) -> true; -is_separator($:) -> true; -is_separator($\\) -> true; -is_separator(?QUOTE) -> true; -is_separator($/) -> true; -is_separator($[) -> true; -is_separator($]) -> true; -is_separator($?) -> true; -is_separator($=) -> true; -is_separator(${) -> true; -is_separator($}) -> true; -is_separator(_) -> false. - -%% @doc Check if a binary has an ASCII seperator character. -has_seperator(<<>>) -> - false; -has_seperator(<<$/, Rest/binary>>) -> - has_seperator(Rest); -has_seperator(<>) -> - case is_separator(C) of - true -> - true; - false -> - has_seperator(Rest) - end. - -%% @doc Convert to a binary and raise an error if quoting is required. Quoting -%% is broken in different ways for different browsers. Its better to simply -%% avoiding doing it at all. -%% @end --spec quote(term()) -> binary(). -quote(V0) -> - V = any_to_binary(V0), - case has_seperator(V) of - true -> - erlang:error({cookie_quoting_required, V}); - false -> - V - end. - --spec add_seconds(integer(), calendar:datetime()) -> calendar:datetime(). -add_seconds(Secs, LocalTime) -> - Greg = calendar:datetime_to_gregorian_seconds(LocalTime), - calendar:gregorian_seconds_to_datetime(Greg + Secs). - --spec age_to_cookie_date(integer(), calendar:datetime()) -> binary(). -age_to_cookie_date(Age, LocalTime) -> - cowboy_clock:rfc2109(add_seconds(Age, LocalTime)). - --spec parse_cookie(binary(), kvlist()) -> kvlist(). -parse_cookie(<<>>, Acc) -> - lists:reverse(Acc); -parse_cookie(String, Acc) -> - {{Token, Value}, Rest} = read_pair(String), - Acc1 = case Token of - <<"">> -> - Acc; - <<"$", _R/binary>> -> - Acc; - _ -> - [{Token, Value} | Acc] - end, - parse_cookie(Rest, Acc1). 
- --spec read_pair(binary()) -> {{binary(), binary()}, binary()}. -read_pair(String) -> - {Token, Rest} = read_token(skip_whitespace(String)), - {Value, Rest1} = read_value(skip_whitespace(Rest)), - {{Token, Value}, skip_past_separator(Rest1)}. - --spec read_value(binary()) -> {binary(), binary()}. -read_value(<<"=", Value/binary>>) -> - Value1 = skip_whitespace(Value), - case Value1 of - <> -> - read_quoted(Value1); - _ -> - read_token(Value1) - end; -read_value(String) -> - {<<"">>, String}. - --spec read_quoted(binary()) -> {binary(), binary()}. -read_quoted(<>) -> - read_quoted(String, <<"">>). - --spec read_quoted(binary(), binary()) -> {binary(), binary()}. -read_quoted(<<"">>, Acc) -> - {Acc, <<"">>}; -read_quoted(<>, Acc) -> - {Acc, Rest}; -read_quoted(<<$\\, Any, Rest/binary>>, Acc) -> - read_quoted(Rest, <>); -read_quoted(<>, Acc) -> - read_quoted(Rest, <>). - -%% @doc Drop characters while a function returns true. -binary_dropwhile(_F, <<"">>) -> - <<"">>; -binary_dropwhile(F, String) -> - <> = String, - case F(C) of - true -> - binary_dropwhile(F, Rest); - false -> - String - end. - -%% @doc Remove leading whitespace. --spec skip_whitespace(binary()) -> binary(). -skip_whitespace(String) -> - binary_dropwhile(fun is_whitespace/1, String). - -%% @doc Split a binary when the current character causes F to return true. -binary_splitwith(_F, Head, <<>>) -> - {Head, <<>>}; -binary_splitwith(F, Head, Tail) -> - <> = Tail, - case F(C) of - true -> - {Head, Tail}; - false -> - binary_splitwith(F, <>, NTail) - end. - -%% @doc Split a binary with a function returning true or false on each char. -binary_splitwith(F, String) -> - binary_splitwith(F, <<>>, String). - -%% @doc Split the binary when the next seperator is found. --spec read_token(binary()) -> {binary(), binary()}. -read_token(String) -> - binary_splitwith(fun is_separator/1, String). - -%% @doc Return string after ; or , characters. --spec skip_past_separator(binary()) -> binary(). 
-skip_past_separator(<<"">>) -> - <<"">>; -skip_past_separator(<<";", Rest/binary>>) -> - Rest; -skip_past_separator(<<",", Rest/binary>>) -> - Rest; -skip_past_separator(<<_C, Rest/binary>>) -> - skip_past_separator(Rest). - --spec any_to_binary(binary() | string() | atom() | integer()) -> binary(). -any_to_binary(V) when is_binary(V) -> - V; -any_to_binary(V) when is_list(V) -> - erlang:list_to_binary(V); -any_to_binary(V) when is_atom(V) -> - erlang:atom_to_binary(V, latin1); -any_to_binary(V) when is_integer(V) -> - list_to_binary(integer_to_list(V)). - -%% Tests. - --ifdef(TEST). - -quote_test() -> - %% ?assertError eunit macro is not compatible with coverage module - _ = try quote(<<":wq">>) - catch error:{cookie_quoting_required, <<":wq">>} -> ok - end, - ?assertEqual(<<"foo">>,quote(foo)), - ok. - -parse_cookie_test() -> - %% RFC example - C1 = <<"$Version=\"1\"; Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\"; - Part_Number=\"Rocket_Launcher_0001\"; $Path=\"/acme\"; - Shipping=\"FedEx\"; $Path=\"/acme\"">>, - ?assertEqual( - [{<<"Customer">>,<<"WILE_E_COYOTE">>}, - {<<"Part_Number">>,<<"Rocket_Launcher_0001">>}, - {<<"Shipping">>,<<"FedEx">>}], - parse_cookie(C1)), - %% Potential edge cases - ?assertEqual( - [{<<"foo">>, <<"x">>}], - parse_cookie(<<"foo=\"\\x\"">>)), - ?assertEqual( - [], - parse_cookie(<<"=">>)), - ?assertEqual( - [{<<"foo">>, <<"">>}, {<<"bar">>, <<"">>}], - parse_cookie(<<" foo ; bar ">>)), - ?assertEqual( - [{<<"foo">>, <<"">>}, {<<"bar">>, <<"">>}], - parse_cookie(<<"foo=;bar=">>)), - ?assertEqual( - [{<<"foo">>, <<"\";">>}, {<<"bar">>, <<"">>}], - parse_cookie(<<"foo = \"\\\";\";bar ">>)), - ?assertEqual( - [{<<"foo">>, <<"\";bar">>}], - parse_cookie(<<"foo=\"\\\";bar">>)), - ?assertEqual( - [], - parse_cookie(<<"">>)), - ?assertEqual( - [{<<"foo">>, <<"bar">>}, {<<"baz">>, <<"wibble">>}], - parse_cookie(<<"foo=bar , baz=wibble ">>)), - ok. 
- -domain_test() -> - ?assertEqual( - {<<"Set-Cookie">>, - <<"Customer=WILE_E_COYOTE; " - "Version=1; " - "Domain=acme.com; " - "HttpOnly">>}, - cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, - [{http_only, true}, {domain, <<"acme.com">>}])), - ok. - -local_time_test() -> - {<<"Set-Cookie">>, B} = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, - [{max_age, 111}, {secure, true}]), - - ?assertMatch( - [<<"Customer=WILE_E_COYOTE">>, - <<" Version=1">>, - <<" Expires=", _R/binary>>, - <<" Max-Age=111">>, - <<" Secure">>], - binary:split(B, <<";">>, [global])), - ok. - --spec cookie_test() -> no_return(). %% Not actually true, just a bad option. -cookie_test() -> - C1 = {<<"Set-Cookie">>, - <<"Customer=WILE_E_COYOTE; " - "Version=1; " - "Path=/acme">>}, - C1 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, [{path, <<"/acme">>}]), - - C1 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, - [{path, <<"/acme">>}, {badoption, <<"negatory">>}]), - - {<<"Set-Cookie">>,<<"=NoKey; Version=1">>} - = cookie(<<"">>, <<"NoKey">>, []), - {<<"Set-Cookie">>,<<"=NoKey; Version=1">>} - = cookie(<<"">>, <<"NoKey">>), - LocalTime = calendar:universal_time_to_local_time( - {{2007, 5, 15}, {13, 45, 33}}), - C2 = {<<"Set-Cookie">>, - <<"Customer=WILE_E_COYOTE; " - "Version=1; " - "Expires=Tue, 15 May 2007 13:45:33 GMT; " - "Max-Age=0">>}, - C2 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, - [{max_age, -111}, {local_time, LocalTime}]), - C3 = {<<"Set-Cookie">>, - <<"Customer=WILE_E_COYOTE; " - "Version=1; " - "Expires=Wed, 16 May 2007 13:45:50 GMT; " - "Max-Age=86417">>}, - C3 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, - [{max_age, 86417}, {local_time, LocalTime}]), - ok. - --endif. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_dispatcher.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_dispatcher.erl deleted file mode 100644 index 22f6e1e..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_dispatcher.erl +++ /dev/null @@ -1,309 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% Copyright (c) 2011, Anthony Ramine -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc Dispatch requests according to a hostname and path. --module(cowboy_dispatcher). - --export([split_host/1, split_path/2, match/3]). %% API. - --type bindings() :: list({atom(), binary()}). --type tokens() :: list(binary()). --type match_rule() :: '_' | '*' | list(binary() | '_' | '...' | atom()). --type dispatch_path() :: list({match_rule(), module(), any()}). --type dispatch_rule() :: {Host::match_rule(), Path::dispatch_path()}. --type dispatch_rules() :: list(dispatch_rule()). - --export_type([bindings/0, tokens/0, dispatch_rules/0]). - --include_lib("eunit/include/eunit.hrl"). - -%% API. - -%% @doc Split a hostname into a list of tokens. --spec split_host(binary()) - -> {tokens(), binary(), undefined | inet:ip_port()}. 
-split_host(<<>>) -> - {[], <<>>, undefined}; -split_host(Host) -> - case binary:split(Host, <<":">>) of - [Host] -> - {binary:split(Host, <<".">>, [global, trim]), Host, undefined}; - [Host2, Port] -> - {binary:split(Host2, <<".">>, [global, trim]), Host2, - list_to_integer(binary_to_list(Port))} - end. - -%% @doc Split a path into a list of path segments. -%% -%% Following RFC2396, this function may return path segments containing any -%% character, including / if, and only if, a / was escaped -%% and part of a path segment. --spec split_path(binary(), fun((binary()) -> binary())) -> - {tokens(), binary(), binary()}. -split_path(Path, URLDec) -> - case binary:split(Path, <<"?">>) of - [Path] -> {do_split_path(Path, <<"/">>, URLDec), Path, <<>>}; - [<<>>, Qs] -> {[], <<>>, Qs}; - [Path2, Qs] -> {do_split_path(Path2, <<"/">>, URLDec), Path2, Qs} - end. - --spec do_split_path(binary(), <<_:8>>, fun((binary()) -> binary())) -> tokens(). -do_split_path(RawPath, Separator, URLDec) -> - EncodedPath = case binary:split(RawPath, Separator, [global, trim]) of - [<<>>|Path] -> Path; - Path -> Path - end, - [URLDec(Token) || Token <- EncodedPath]. - -%% @doc Match hostname tokens and path tokens against dispatch rules. -%% -%% It is typically used for matching tokens for the hostname and path of -%% the request against a global dispatch rule for your listener. -%% -%% Dispatch rules are a list of {Hostname, PathRules} tuples, with -%% PathRules being a list of {Path, HandlerMod, HandlerOpts}. -%% -%% Hostname and Path are match rules and can be either the -%% atom '_', which matches everything for a single token, the atom -%% '*', which matches everything for the rest of the tokens, or a -%% list of tokens. Each token can be either a binary, the atom '_', -%% the atom '...' or a named atom. A binary token must match exactly, -%% '_' matches everything for a single token, '...' 
matches -%% everything for the rest of the tokens and a named atom will bind the -%% corresponding token value and return it. -%% -%% The list of hostname tokens is reversed before matching. For example, if -%% we were to match "www.dev-extend.eu", we would first match "eu", then -%% "dev-extend", then "www". This means that in the context of hostnames, -%% the '...' atom matches properly the lower levels of the domain -%% as would be expected. -%% -%% When a result is found, this function will return the handler module and -%% options found in the dispatch list, a key-value list of bindings and -%% the tokens that were matched by the '...' atom for both the -%% hostname and path. --spec match(Host::tokens(), Path::tokens(), dispatch_rules()) - -> {ok, module(), any(), bindings(), - HostInfo::undefined | tokens(), - PathInfo::undefined | tokens()} - | {error, notfound, host} | {error, notfound, path}. -match(_Host, _Path, []) -> - {error, notfound, host}; -match(_Host, Path, [{'_', PathMatchs}|_Tail]) -> - match_path(Path, PathMatchs, [], undefined); -match(Host, Path, [{HostMatch, PathMatchs}|Tail]) -> - case try_match(host, Host, HostMatch) of - false -> - match(Host, Path, Tail); - {true, HostBinds, undefined} -> - match_path(Path, PathMatchs, HostBinds, undefined); - {true, HostBinds, HostInfo} -> - match_path(Path, PathMatchs, HostBinds, lists:reverse(HostInfo)) - end. - --spec match_path(tokens(), dispatch_path(), bindings(), - HostInfo::undefined | tokens()) - -> {ok, module(), any(), bindings(), - HostInfo::undefined | tokens(), - PathInfo::undefined | tokens()} - | {error, notfound, path}. 
-match_path(_Path, [], _HostBinds, _HostInfo) -> - {error, notfound, path}; -match_path(_Path, [{'_', Handler, Opts}|_Tail], HostBinds, HostInfo) -> - {ok, Handler, Opts, HostBinds, HostInfo, undefined}; -match_path('*', [{'*', Handler, Opts}|_Tail], HostBinds, HostInfo) -> - {ok, Handler, Opts, HostBinds, HostInfo, undefined}; -match_path(Path, [{PathMatch, Handler, Opts}|Tail], HostBinds, HostInfo) -> - case try_match(path, Path, PathMatch) of - false -> - match_path(Path, Tail, HostBinds, HostInfo); - {true, PathBinds, PathInfo} -> - {ok, Handler, Opts, HostBinds ++ PathBinds, HostInfo, PathInfo} - end. - -%% Internal. - --spec try_match(host | path, tokens(), match_rule()) - -> {true, bindings(), undefined | tokens()} | false. -try_match(host, List, Match) -> - list_match(lists:reverse(List), lists:reverse(Match), []); -try_match(path, List, Match) -> - list_match(List, Match, []). - --spec list_match(tokens(), match_rule(), bindings()) - -> {true, bindings(), undefined | tokens()} | false. -%% Atom '...' matches any trailing path, stop right now. -list_match(List, ['...'], Binds) -> - {true, Binds, List}; -%% Atom '_' matches anything, continue. -list_match([_E|Tail], ['_'|TailMatch], Binds) -> - list_match(Tail, TailMatch, Binds); -%% Both values match, continue. -list_match([E|Tail], [E|TailMatch], Binds) -> - list_match(Tail, TailMatch, Binds); -%% Bind E to the variable name V and continue. -list_match([E|Tail], [V|TailMatch], Binds) when is_atom(V) -> - list_match(Tail, TailMatch, [{V, E}|Binds]); -%% Match complete. -list_match([], [], Binds) -> - {true, Binds, undefined}; -%% Values don't match, stop. -list_match(_List, _Match, _Binds) -> - false. - -%% Tests. - --ifdef(TEST). 
- -split_host_test_() -> - %% {Host, Result} - Tests = [ - {<<"">>, {[], <<"">>, undefined}}, - {<<".........">>, {[], <<".........">>, undefined}}, - {<<"*">>, {[<<"*">>], <<"*">>, undefined}}, - {<<"cowboy.dev-extend.eu">>, - {[<<"cowboy">>, <<"dev-extend">>, <<"eu">>], - <<"cowboy.dev-extend.eu">>, undefined}}, - {<<"dev-extend..eu">>, - {[<<"dev-extend">>, <<>>, <<"eu">>], - <<"dev-extend..eu">>, undefined}}, - {<<"dev-extend.eu">>, - {[<<"dev-extend">>, <<"eu">>], <<"dev-extend.eu">>, undefined}}, - {<<"dev-extend.eu:8080">>, - {[<<"dev-extend">>, <<"eu">>], <<"dev-extend.eu">>, 8080}}, - {<<"a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z">>, - {[<<"a">>, <<"b">>, <<"c">>, <<"d">>, <<"e">>, <<"f">>, <<"g">>, - <<"h">>, <<"i">>, <<"j">>, <<"k">>, <<"l">>, <<"m">>, <<"n">>, - <<"o">>, <<"p">>, <<"q">>, <<"r">>, <<"s">>, <<"t">>, <<"u">>, - <<"v">>, <<"w">>, <<"x">>, <<"y">>, <<"z">>], - <<"a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z">>, - undefined}} - ], - [{H, fun() -> R = split_host(H) end} || {H, R} <- Tests]. - -split_host_fail_test_() -> - Tests = [ - <<"dev-extend.eu:owns">>, - <<"dev-extend.eu: owns">>, - <<"dev-extend.eu:42fun">>, - <<"dev-extend.eu: 42fun">>, - <<"dev-extend.eu:42 fun">>, - <<"dev-extend.eu:fun 42">>, - <<"dev-extend.eu: 42">>, - <<":owns">>, - <<":42 fun">> - ], - [{H, fun() -> case catch split_host(H) of - {'EXIT', _Reason} -> ok - end end} || H <- Tests]. 
- -split_path_test_() -> - %% {Path, Result, QueryString} - Tests = [ - {<<"?">>, [], <<"">>, <<"">>}, - {<<"???">>, [], <<"">>, <<"??">>}, - {<<"/">>, [], <<"/">>, <<"">>}, - {<<"/users">>, [<<"users">>], <<"/users">>, <<"">>}, - {<<"/users?">>, [<<"users">>], <<"/users">>, <<"">>}, - {<<"/users?a">>, [<<"users">>], <<"/users">>, <<"a">>}, - {<<"/users/42/friends?a=b&c=d&e=notsure?whatever">>, - [<<"users">>, <<"42">>, <<"friends">>], - <<"/users/42/friends">>, <<"a=b&c=d&e=notsure?whatever">>}, - {<<"/users/a+b/c%21d?e+f=g+h">>, - [<<"users">>, <<"a b">>, <<"c!d">>], - <<"/users/a+b/c%21d">>, <<"e+f=g+h">>} - ], - URLDecode = fun(Bin) -> cowboy_http:urldecode(Bin, crash) end, - [{P, fun() -> {R, RawP, Qs} = split_path(P, URLDecode) end} - || {P, R, RawP, Qs} <- Tests]. - -match_test_() -> - Dispatch = [ - {[<<"www">>, '_', <<"dev-extend">>, <<"eu">>], [ - {[<<"users">>, '_', <<"mails">>], match_any_subdomain_users, []} - ]}, - {[<<"dev-extend">>, <<"eu">>], [ - {[<<"users">>, id, <<"friends">>], match_extend_users_friends, []}, - {'_', match_extend, []} - ]}, - {[<<"dev-extend">>, var], [ - {[<<"threads">>, var], match_duplicate_vars, - [we, {expect, two}, var, here]} - ]}, - {[<<"erlang">>, ext], [ - {'_', match_erlang_ext, []} - ]}, - {'_', [ - {[<<"users">>, id, <<"friends">>], match_users_friends, []}, - {'_', match_any, []} - ]} - ], - %% {Host, Path, Result} - Tests = [ - {[<<"any">>], [], {ok, match_any, [], []}}, - {[<<"www">>, <<"any">>, <<"dev-extend">>, <<"eu">>], - [<<"users">>, <<"42">>, <<"mails">>], - {ok, match_any_subdomain_users, [], []}}, - {[<<"www">>, <<"dev-extend">>, <<"eu">>], - [<<"users">>, <<"42">>, <<"mails">>], {ok, match_any, [], []}}, - {[<<"www">>, <<"dev-extend">>, <<"eu">>], [], {ok, match_any, [], []}}, - {[<<"www">>, <<"any">>, <<"dev-extend">>, <<"eu">>], - [<<"not_users">>, <<"42">>, <<"mails">>], {error, notfound, path}}, - {[<<"dev-extend">>, <<"eu">>], [], {ok, match_extend, [], []}}, - {[<<"dev-extend">>, <<"eu">>], 
[<<"users">>, <<"42">>, <<"friends">>], - {ok, match_extend_users_friends, [], [{id, <<"42">>}]}}, - {[<<"erlang">>, <<"fr">>], '_', - {ok, match_erlang_ext, [], [{ext, <<"fr">>}]}}, - {[<<"any">>], [<<"users">>, <<"444">>, <<"friends">>], - {ok, match_users_friends, [], [{id, <<"444">>}]}}, - {[<<"dev-extend">>, <<"fr">>], [<<"threads">>, <<"987">>], - {ok, match_duplicate_vars, [we, {expect, two}, var, here], - [{var, <<"fr">>}, {var, <<"987">>}]}} - ], - [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() -> - {ok, Handler, Opts, Binds, undefined, undefined} = match(H, P, Dispatch) - end} || {H, P, {ok, Handler, Opts, Binds}} <- Tests]. - -match_info_test_() -> - Dispatch = [ - {[<<"www">>, <<"dev-extend">>, <<"eu">>], [ - {[<<"pathinfo">>, <<"is">>, <<"next">>, '...'], match_path, []} - ]}, - {['...', <<"dev-extend">>, <<"eu">>], [ - {'_', match_any, []} - ]} - ], - Tests = [ - {[<<"dev-extend">>, <<"eu">>], [], - {ok, match_any, [], [], [], undefined}}, - {[<<"bugs">>, <<"dev-extend">>, <<"eu">>], [], - {ok, match_any, [], [], [<<"bugs">>], undefined}}, - {[<<"cowboy">>, <<"bugs">>, <<"dev-extend">>, <<"eu">>], [], - {ok, match_any, [], [], [<<"cowboy">>, <<"bugs">>], undefined}}, - {[<<"www">>, <<"dev-extend">>, <<"eu">>], - [<<"pathinfo">>, <<"is">>, <<"next">>], - {ok, match_path, [], [], undefined, []}}, - {[<<"www">>, <<"dev-extend">>, <<"eu">>], - [<<"pathinfo">>, <<"is">>, <<"next">>, <<"path_info">>], - {ok, match_path, [], [], undefined, [<<"path_info">>]}}, - {[<<"www">>, <<"dev-extend">>, <<"eu">>], - [<<"pathinfo">>, <<"is">>, <<"next">>, <<"foo">>, <<"bar">>], - {ok, match_path, [], [], undefined, [<<"foo">>, <<"bar">>]}} - ], - [{lists:flatten(io_lib:format("~p, ~p", [H, P])), fun() -> - R = match(H, P, Dispatch) - end} || {H, P, R} <- Tests]. - --endif. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_handler.erl deleted file mode 100644 index b220b09..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_handler.erl +++ /dev/null @@ -1,48 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc Handler for HTTP requests. -%% -%% HTTP handlers must implement three callbacks: init/3, -%% handle/2 and terminate/2, called one after another in -%% that order. -%% -%% init/3 is meant for initialization. It receives information about -%% the transport and protocol used, along with the handler options from the -%% dispatch list, and allows you to upgrade the protocol if needed. You can -%% define a request-wide state here. -%% -%% handle/2 is meant for handling the request. It receives the -%% request and the state previously defined. -%% -%% terminate/2 is meant for cleaning up. It also receives the -%% request and the state previously defined. -%% -%% You do not have to read the request body or even send a reply if you do -%% not need to. Cowboy will properly handle these cases and clean-up afterwards. 
-%% In doubt it'll simply close the connection. -%% -%% Note that when upgrading the connection to WebSocket you do not need to -%% define the handle/2 and terminate/2 callbacks. --module(cowboy_http_handler). - --export([behaviour_info/1]). - -%% @private --spec behaviour_info(_) - -> undefined | [{handle, 2} | {init, 3} | {terminate, 2}, ...]. -behaviour_info(callbacks) -> - [{init, 3}, {handle, 2}, {terminate, 2}]; -behaviour_info(_Other) -> - undefined. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_protocol.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_protocol.erl deleted file mode 100644 index 0183785..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_protocol.erl +++ /dev/null @@ -1,472 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% Copyright (c) 2011, Anthony Ramine -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc HTTP protocol handler. -%% -%% The available options are: -%%
-%%
dispatch
The dispatch list for this protocol.
-%%
max_empty_lines
Max number of empty lines before a request. -%% Defaults to 5.
-%%
timeout
Time in milliseconds before an idle -%% connection is closed. Defaults to 5000 milliseconds.
-%%
urldecode
Function and options argument to use when decoding -%% URL encoded strings. Defaults to `{fun cowboy_http:urldecode/2, crash}'. -%%
-%%
-%% -%% Note that there is no need to monitor these processes when using Cowboy as -%% an application as it already supervises them under the listener supervisor. -%% -%% @see cowboy_dispatcher -%% @see cowboy_http_handler --module(cowboy_http_protocol). --behaviour(cowboy_protocol). - --export([start_link/4]). %% API. --export([init/4, parse_request/1, handler_loop/3]). %% FSM. - --include("include/http.hrl"). --include_lib("eunit/include/eunit.hrl"). - --record(state, { - listener :: pid(), - socket :: inet:socket(), - transport :: module(), - dispatch :: cowboy_dispatcher:dispatch_rules(), - handler :: {module(), any()}, - urldecode :: {fun((binary(), T) -> binary()), T}, - req_empty_lines = 0 :: integer(), - max_empty_lines :: integer(), - req_keepalive = 1 :: integer(), - max_keepalive :: integer(), - max_line_length :: integer(), - timeout :: timeout(), - buffer = <<>> :: binary(), - hibernate = false, - loop_timeout = infinity :: timeout(), - loop_timeout_ref -}). - -%% API. - -%% @doc Start an HTTP protocol process. --spec start_link(pid(), inet:socket(), module(), any()) -> {ok, pid()}. -start_link(ListenerPid, Socket, Transport, Opts) -> - Pid = spawn_link(?MODULE, init, [ListenerPid, Socket, Transport, Opts]), - {ok, Pid}. - -%% FSM. - -%% @private --spec init(pid(), inet:socket(), module(), any()) -> ok | none(). 
-init(ListenerPid, Socket, Transport, Opts) -> - Dispatch = proplists:get_value(dispatch, Opts, []), - MaxEmptyLines = proplists:get_value(max_empty_lines, Opts, 5), - MaxKeepalive = proplists:get_value(max_keepalive, Opts, infinity), - MaxLineLength = proplists:get_value(max_line_length, Opts, 4096), - Timeout = proplists:get_value(timeout, Opts, 5000), - URLDecDefault = {fun cowboy_http:urldecode/2, crash}, - URLDec = proplists:get_value(urldecode, Opts, URLDecDefault), - ok = cowboy:accept_ack(ListenerPid), - wait_request(#state{listener=ListenerPid, socket=Socket, transport=Transport, - dispatch=Dispatch, max_empty_lines=MaxEmptyLines, - max_keepalive=MaxKeepalive, max_line_length=MaxLineLength, - timeout=Timeout, urldecode=URLDec}). - -%% @private --spec parse_request(#state{}) -> ok | none(). -%% We limit the length of the Request-line to MaxLength to avoid endlessly -%% reading from the socket and eventually crashing. -parse_request(State=#state{buffer=Buffer, max_line_length=MaxLength}) -> - case erlang:decode_packet(http_bin, Buffer, []) of - {ok, Request, Rest} -> request(Request, State#state{buffer=Rest}); - {more, _Length} when byte_size(Buffer) > MaxLength -> - error_terminate(413, State); - {more, _Length} -> wait_request(State); - {error, _Reason} -> error_terminate(400, State) - end. - --spec wait_request(#state{}) -> ok | none(). -wait_request(State=#state{socket=Socket, transport=Transport, - timeout=T, buffer=Buffer}) -> - case Transport:recv(Socket, 0, T) of - {ok, Data} -> parse_request(State#state{ - buffer= << Buffer/binary, Data/binary >>}); - {error, _Reason} -> terminate(State) - end. - --spec request({http_request, cowboy_http:method(), cowboy_http:uri(), - cowboy_http:version()}, #state{}) -> ok | none(). 
-request({http_request, _Method, _URI, Version}, State) - when Version =/= {1, 0}, Version =/= {1, 1} -> - error_terminate(505, State); -request({http_request, Method, {abs_path, AbsPath}, Version}, - State=#state{socket=Socket, transport=Transport, - urldecode={URLDecFun, URLDecArg}=URLDec}) -> - URLDecode = fun(Bin) -> URLDecFun(Bin, URLDecArg) end, - {Path, RawPath, Qs} = cowboy_dispatcher:split_path(AbsPath, URLDecode), - ConnAtom = version_to_connection(Version), - parse_header(#http_req{socket=Socket, transport=Transport, - connection=ConnAtom, pid=self(), method=Method, version=Version, - path=Path, raw_path=RawPath, raw_qs=Qs, urldecode=URLDec}, State); -request({http_request, Method, '*', Version}, - State=#state{socket=Socket, transport=Transport, urldecode=URLDec}) -> - ConnAtom = version_to_connection(Version), - parse_header(#http_req{socket=Socket, transport=Transport, - connection=ConnAtom, pid=self(), method=Method, version=Version, - path='*', raw_path= <<"*">>, raw_qs= <<>>, urldecode=URLDec}, State); -request({http_request, _Method, _URI, _Version}, State) -> - error_terminate(501, State); -request({http_error, <<"\r\n">>}, - State=#state{req_empty_lines=N, max_empty_lines=N}) -> - error_terminate(400, State); -request({http_error, <<"\r\n">>}, State=#state{req_empty_lines=N}) -> - parse_request(State#state{req_empty_lines=N + 1}); -request(_Any, State) -> - error_terminate(400, State). - --spec parse_header(#http_req{}, #state{}) -> ok | none(). -parse_header(Req, State=#state{buffer=Buffer, max_line_length=MaxLength}) -> - case erlang:decode_packet(httph_bin, Buffer, []) of - {ok, Header, Rest} -> header(Header, Req, State#state{buffer=Rest}); - {more, _Length} when byte_size(Buffer) > MaxLength -> - error_terminate(413, State); - {more, _Length} -> wait_header(Req, State); - {error, _Reason} -> error_terminate(400, State) - end. - --spec wait_header(#http_req{}, #state{}) -> ok | none(). 
-wait_header(Req, State=#state{socket=Socket, - transport=Transport, timeout=T, buffer=Buffer}) -> - case Transport:recv(Socket, 0, T) of - {ok, Data} -> parse_header(Req, State#state{ - buffer= << Buffer/binary, Data/binary >>}); - {error, timeout} -> error_terminate(408, State); - {error, closed} -> terminate(State) - end. - --spec header({http_header, integer(), cowboy_http:header(), any(), binary()} - | http_eoh, #http_req{}, #state{}) -> ok | none(). -header({http_header, _I, 'Host', _R, RawHost}, Req=#http_req{ - transport=Transport, host=undefined}, State) -> - RawHost2 = cowboy_bstr:to_lower(RawHost), - case catch cowboy_dispatcher:split_host(RawHost2) of - {Host, RawHost3, undefined} -> - Port = default_port(Transport:name()), - dispatch(fun parse_header/2, Req#http_req{ - host=Host, raw_host=RawHost3, port=Port, - headers=[{'Host', RawHost3}|Req#http_req.headers]}, State); - {Host, RawHost3, Port} -> - dispatch(fun parse_header/2, Req#http_req{ - host=Host, raw_host=RawHost3, port=Port, - headers=[{'Host', RawHost3}|Req#http_req.headers]}, State); - {'EXIT', _Reason} -> - error_terminate(400, State) - end; -%% Ignore Host headers if we already have it. -header({http_header, _I, 'Host', _R, _V}, Req, State) -> - parse_header(Req, State); -header({http_header, _I, 'Connection', _R, Connection}, - Req=#http_req{headers=Headers}, State) -> - Req2 = Req#http_req{headers=[{'Connection', Connection}|Headers]}, - {ConnTokens, Req3} - = cowboy_http_req:parse_header('Connection', Req2), - ConnAtom = cowboy_http:connection_to_atom(ConnTokens), - parse_header(Req3#http_req{connection=ConnAtom}, State); -header({http_header, _I, Field, _R, Value}, Req, State) -> - Field2 = format_header(Field), - parse_header(Req#http_req{headers=[{Field2, Value}|Req#http_req.headers]}, - State); -%% The Host header is required in HTTP/1.1. -header(http_eoh, #http_req{version={1, 1}, host=undefined}, State) -> - error_terminate(400, State); -%% It is however optional in HTTP/1.0. 
-header(http_eoh, Req=#http_req{version={1, 0}, transport=Transport, - host=undefined}, State=#state{buffer=Buffer}) -> - Port = default_port(Transport:name()), - dispatch(fun handler_init/2, Req#http_req{host=[], raw_host= <<>>, - port=Port, buffer=Buffer}, State#state{buffer= <<>>}); -header(http_eoh, Req, State=#state{buffer=Buffer}) -> - handler_init(Req#http_req{buffer=Buffer}, State#state{buffer= <<>>}); -header(_Any, _Req, State) -> - error_terminate(400, State). - --spec dispatch(fun((#http_req{}, #state{}) -> ok), - #http_req{}, #state{}) -> ok | none(). -dispatch(Next, Req=#http_req{host=Host, path=Path}, - State=#state{dispatch=Dispatch}) -> - %% @todo We should allow a configurable chain of handlers here to - %% allow things like url rewriting, site-wide authentication, - %% optional dispatching, and more. It would default to what - %% we are doing so far. - case cowboy_dispatcher:match(Host, Path, Dispatch) of - {ok, Handler, Opts, Binds, HostInfo, PathInfo} -> - Next(Req#http_req{host_info=HostInfo, path_info=PathInfo, - bindings=Binds}, State#state{handler={Handler, Opts}}); - {error, notfound, host} -> - error_terminate(400, State); - {error, notfound, path} -> - error_terminate(404, State) - end. - --spec handler_init(#http_req{}, #state{}) -> ok | none(). 
-handler_init(Req, State=#state{transport=Transport, - handler={Handler, Opts}}) -> - try Handler:init({Transport:name(), http}, Req, Opts) of - {ok, Req2, HandlerState} -> - handler_handle(HandlerState, Req2, State); - {loop, Req2, HandlerState} -> - handler_before_loop(HandlerState, Req2, State); - {loop, Req2, HandlerState, hibernate} -> - handler_before_loop(HandlerState, Req2, - State#state{hibernate=true}); - {loop, Req2, HandlerState, Timeout} -> - handler_before_loop(HandlerState, Req2, - State#state{loop_timeout=Timeout}); - {loop, Req2, HandlerState, Timeout, hibernate} -> - handler_before_loop(HandlerState, Req2, - State#state{hibernate=true, loop_timeout=Timeout}); - {shutdown, Req2, HandlerState} -> - handler_terminate(HandlerState, Req2, State); - %% @todo {upgrade, transport, Module} - {upgrade, protocol, Module} -> - upgrade_protocol(Req, State, Module) - catch Class:Reason -> - error_terminate(500, State), - error_logger:error_msg( - "** Handler ~p terminating in init/3~n" - " for the reason ~p:~p~n" - "** Options were ~p~n" - "** Request was ~p~n** Stacktrace: ~p~n~n", - [Handler, Class, Reason, Opts, Req, erlang:get_stacktrace()]) - end. - --spec upgrade_protocol(#http_req{}, #state{}, atom()) -> ok | none(). -upgrade_protocol(Req, State=#state{listener=ListenerPid, - handler={Handler, Opts}}, Module) -> - case Module:upgrade(ListenerPid, Handler, Opts, Req) of - {UpgradeRes, Req2} -> next_request(Req2, State, UpgradeRes); - _Any -> terminate(State) - end. - --spec handler_handle(any(), #http_req{}, #state{}) -> ok | none(). 
-handler_handle(HandlerState, Req, State=#state{handler={Handler, Opts}}) -> - try Handler:handle(Req, HandlerState) of - {ok, Req2, HandlerState2} -> - terminate_request(HandlerState2, Req2, State) - catch Class:Reason -> - error_logger:error_msg( - "** Handler ~p terminating in handle/2~n" - " for the reason ~p:~p~n" - "** Options were ~p~n** Handler state was ~p~n" - "** Request was ~p~n** Stacktrace: ~p~n~n", - [Handler, Class, Reason, Opts, - HandlerState, Req, erlang:get_stacktrace()]), - handler_terminate(HandlerState, Req, State), - error_terminate(500, State) - end. - -%% We don't listen for Transport closes because that would force us -%% to receive data and buffer it indefinitely. --spec handler_before_loop(any(), #http_req{}, #state{}) -> ok | none(). -handler_before_loop(HandlerState, Req, State=#state{hibernate=true}) -> - State2 = handler_loop_timeout(State), - erlang:hibernate(?MODULE, handler_loop, - [HandlerState, Req, State2#state{hibernate=false}]); -handler_before_loop(HandlerState, Req, State) -> - State2 = handler_loop_timeout(State), - handler_loop(HandlerState, Req, State2). - -%% Almost the same code can be found in cowboy_http_websocket. --spec handler_loop_timeout(#state{}) -> #state{}. -handler_loop_timeout(State=#state{loop_timeout=infinity}) -> - State#state{loop_timeout_ref=undefined}; -handler_loop_timeout(State=#state{loop_timeout=Timeout, - loop_timeout_ref=PrevRef}) -> - _ = case PrevRef of undefined -> ignore; PrevRef -> - erlang:cancel_timer(PrevRef) end, - TRef = make_ref(), - erlang:send_after(Timeout, self(), {?MODULE, timeout, TRef}), - State#state{loop_timeout_ref=TRef}. - --spec handler_loop(any(), #http_req{}, #state{}) -> ok | none(). 
-handler_loop(HandlerState, Req, State=#state{loop_timeout_ref=TRef}) -> - receive - {?MODULE, timeout, TRef} -> - terminate_request(HandlerState, Req, State); - {?MODULE, timeout, OlderTRef} when is_reference(OlderTRef) -> - handler_loop(HandlerState, Req, State); - Message -> - handler_call(HandlerState, Req, State, Message) - end. - --spec handler_call(any(), #http_req{}, #state{}, any()) -> ok | none(). -handler_call(HandlerState, Req, State=#state{handler={Handler, Opts}}, - Message) -> - try Handler:info(Message, Req, HandlerState) of - {ok, Req2, HandlerState2} -> - terminate_request(HandlerState2, Req2, State); - {loop, Req2, HandlerState2} -> - handler_before_loop(HandlerState2, Req2, State); - {loop, Req2, HandlerState2, hibernate} -> - handler_before_loop(HandlerState2, Req2, - State#state{hibernate=true}) - catch Class:Reason -> - error_logger:error_msg( - "** Handler ~p terminating in info/3~n" - " for the reason ~p:~p~n" - "** Options were ~p~n** Handler state was ~p~n" - "** Request was ~p~n** Stacktrace: ~p~n~n", - [Handler, Class, Reason, Opts, - HandlerState, Req, erlang:get_stacktrace()]), - handler_terminate(HandlerState, Req, State), - error_terminate(500, State) - end. - --spec handler_terminate(any(), #http_req{}, #state{}) -> ok. -handler_terminate(HandlerState, Req, #state{handler={Handler, Opts}}) -> - try - Handler:terminate(Req#http_req{resp_state=locked}, HandlerState) - catch Class:Reason -> - error_logger:error_msg( - "** Handler ~p terminating in terminate/2~n" - " for the reason ~p:~p~n" - "** Options were ~p~n** Handler state was ~p~n" - "** Request was ~p~n** Stacktrace: ~p~n~n", - [Handler, Class, Reason, Opts, - HandlerState, Req, erlang:get_stacktrace()]) - end. - --spec terminate_request(any(), #http_req{}, #state{}) -> ok | none(). -terminate_request(HandlerState, Req, State) -> - HandlerRes = handler_terminate(HandlerState, Req, State), - next_request(Req, State, HandlerRes). 
- --spec next_request(#http_req{}, #state{}, any()) -> ok | none(). -next_request(Req=#http_req{connection=Conn}, - State=#state{req_keepalive=Keepalive, max_keepalive=MaxKeepalive}, - HandlerRes) -> - RespRes = ensure_response(Req), - {BodyRes, Buffer} = ensure_body_processed(Req), - %% Flush the resp_sent message before moving on. - receive {cowboy_http_req, resp_sent} -> ok after 0 -> ok end, - case {HandlerRes, BodyRes, RespRes, Conn} of - {ok, ok, ok, keepalive} when Keepalive < MaxKeepalive -> - ?MODULE:parse_request(State#state{ - buffer=Buffer, req_empty_lines=0, - req_keepalive=Keepalive + 1}); - _Closed -> - terminate(State) - end. - --spec ensure_body_processed(#http_req{}) -> {ok | close, binary()}. -ensure_body_processed(#http_req{body_state=done, buffer=Buffer}) -> - {ok, Buffer}; -ensure_body_processed(Req=#http_req{body_state=waiting}) -> - case cowboy_http_req:body(Req) of - {error, badarg} -> {ok, Req#http_req.buffer}; %% No body. - {error, _Reason} -> {close, <<>>}; - {ok, _, Req2} -> {ok, Req2#http_req.buffer} - end; -ensure_body_processed(Req=#http_req{body_state={multipart, _, _}}) -> - {ok, Req2} = cowboy_http_req:multipart_skip(Req), - ensure_body_processed(Req2). - --spec ensure_response(#http_req{}) -> ok. -%% The handler has already fully replied to the client. -ensure_response(#http_req{resp_state=done}) -> - ok; -%% No response has been sent but everything apparently went fine. -%% Reply with 204 No Content to indicate this. -ensure_response(Req=#http_req{resp_state=waiting}) -> - _ = cowboy_http_req:reply(204, [], [], Req), - ok; -%% Close the chunked reply. -ensure_response(#http_req{method='HEAD', resp_state=chunks}) -> - close; -ensure_response(#http_req{socket=Socket, transport=Transport, - resp_state=chunks}) -> - Transport:send(Socket, <<"0\r\n\r\n">>), - close. - -%% Only send an error reply if there is no resp_sent message. --spec error_terminate(cowboy_http:status(), #state{}) -> ok. 
-error_terminate(Code, State=#state{socket=Socket, transport=Transport}) -> - receive - {cowboy_http_req, resp_sent} -> ok - after 0 -> - _ = cowboy_http_req:reply(Code, #http_req{ - socket=Socket, transport=Transport, - connection=close, pid=self(), resp_state=waiting}), - ok - end, - terminate(State). - --spec terminate(#state{}) -> ok. -terminate(#state{socket=Socket, transport=Transport}) -> - Transport:close(Socket), - ok. - -%% Internal. - --spec version_to_connection(cowboy_http:version()) -> keepalive | close. -version_to_connection({1, 1}) -> keepalive; -version_to_connection(_Any) -> close. - --spec default_port(atom()) -> 80 | 443. -default_port(ssl) -> 443; -default_port(_) -> 80. - -%% @todo While 32 should be enough for everybody, we should probably make -%% this configurable or something. --spec format_header(atom()) -> atom(); (binary()) -> binary(). -format_header(Field) when is_atom(Field) -> - Field; -format_header(Field) when byte_size(Field) =< 20; byte_size(Field) > 32 -> - Field; -format_header(Field) -> - format_header(Field, true, <<>>). - -format_header(<<>>, _Any, Acc) -> - Acc; -%% Replicate a bug in OTP for compatibility reasons when there's a - right -%% after another. Proper use should always be 'true' instead of 'not Bool'. -format_header(<< $-, Rest/bits >>, Bool, Acc) -> - format_header(Rest, not Bool, << Acc/binary, $- >>); -format_header(<< C, Rest/bits >>, true, Acc) -> - format_header(Rest, false, << Acc/binary, (cowboy_bstr:char_to_upper(C)) >>); -format_header(<< C, Rest/bits >>, false, Acc) -> - format_header(Rest, false, << Acc/binary, (cowboy_bstr:char_to_lower(C)) >>). - -%% Tests. - --ifdef(TEST). 
- -format_header_test_() -> - %% {Header, Result} - Tests = [ - {<<"Sec-Websocket-Version">>, <<"Sec-Websocket-Version">>}, - {<<"Sec-WebSocket-Version">>, <<"Sec-Websocket-Version">>}, - {<<"sec-websocket-version">>, <<"Sec-Websocket-Version">>}, - {<<"SEC-WEBSOCKET-VERSION">>, <<"Sec-Websocket-Version">>}, - %% These last tests ensures we're formatting headers exactly like OTP. - %% Even though it's dumb, it's better for compatibility reasons. - {<<"Sec-WebSocket--Version">>, <<"Sec-Websocket--version">>}, - {<<"Sec-WebSocket---Version">>, <<"Sec-Websocket---Version">>} - ], - [{H, fun() -> R = format_header(H) end} || {H, R} <- Tests]. - --endif. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_req.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_req.erl deleted file mode 100644 index dd772df..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_req.erl +++ /dev/null @@ -1,820 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% Copyright (c) 2011, Anthony Ramine -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc HTTP request manipulation API. -%% -%% Almost all functions in this module return a new Req variable. 
-%% It should always be used instead of the one used in your function call -%% because it keeps the state of the request. It also allows Cowboy to do -%% some lazy evaluation and cache results where possible. --module(cowboy_http_req). - --export([ - method/1, version/1, peer/1, peer_addr/1, - host/1, host_info/1, raw_host/1, port/1, - path/1, path_info/1, raw_path/1, - qs_val/2, qs_val/3, qs_vals/1, raw_qs/1, - binding/2, binding/3, bindings/1, - header/2, header/3, headers/1, - parse_header/2, parse_header/3, - cookie/2, cookie/3, cookies/1, - meta/2, meta/3 -]). %% Request API. - --export([ - body/1, body/2, body_qs/1, - multipart_data/1, multipart_skip/1 -]). %% Request Body API. - --export([ - set_resp_cookie/4, set_resp_header/3, set_resp_body/2, - set_resp_body_fun/3, has_resp_header/2, has_resp_body/1, - reply/2, reply/3, reply/4, - chunked_reply/2, chunked_reply/3, chunk/2, - upgrade_reply/3 -]). %% Response API. - --export([ - compact/1, transport/1 -]). %% Misc API. - --include("include/http.hrl"). --include_lib("eunit/include/eunit.hrl"). - -%% Request API. - -%% @doc Return the HTTP method of the request. --spec method(#http_req{}) -> {cowboy_http:method(), #http_req{}}. -method(Req) -> - {Req#http_req.method, Req}. - -%% @doc Return the HTTP version used for the request. --spec version(#http_req{}) -> {cowboy_http:version(), #http_req{}}. -version(Req) -> - {Req#http_req.version, Req}. - -%% @doc Return the peer address and port number of the remote host. --spec peer(#http_req{}) -> {{inet:ip_address(), inet:ip_port()}, #http_req{}}. -peer(Req=#http_req{socket=Socket, transport=Transport, peer=undefined}) -> - {ok, Peer} = Transport:peername(Socket), - {Peer, Req#http_req{peer=Peer}}; -peer(Req) -> - {Req#http_req.peer, Req}. - -%% @doc Returns the peer address calculated from headers. --spec peer_addr(#http_req{}) -> {inet:ip_address(), #http_req{}}. 
-peer_addr(Req = #http_req{}) -> - {RealIp, Req1} = header(<<"X-Real-Ip">>, Req), - {ForwardedForRaw, Req2} = header(<<"X-Forwarded-For">>, Req1), - {{PeerIp, _PeerPort}, Req3} = peer(Req2), - ForwardedFor = case ForwardedForRaw of - undefined -> - undefined; - ForwardedForRaw -> - case re:run(ForwardedForRaw, "^(?[^\\,]+)", - [{capture, [first_ip], binary}]) of - {match, [FirstIp]} -> FirstIp; - _Any -> undefined - end - end, - {ok, PeerAddr} = if - is_binary(RealIp) -> inet_parse:address(binary_to_list(RealIp)); - is_binary(ForwardedFor) -> inet_parse:address(binary_to_list(ForwardedFor)); - true -> {ok, PeerIp} - end, - {PeerAddr, Req3}. - -%% @doc Return the tokens for the hostname requested. --spec host(#http_req{}) -> {cowboy_dispatcher:tokens(), #http_req{}}. -host(Req) -> - {Req#http_req.host, Req}. - -%% @doc Return the extra host information obtained from partially matching -%% the hostname using '...'. --spec host_info(#http_req{}) - -> {cowboy_dispatcher:tokens() | undefined, #http_req{}}. -host_info(Req) -> - {Req#http_req.host_info, Req}. - -%% @doc Return the raw host directly taken from the request. --spec raw_host(#http_req{}) -> {binary(), #http_req{}}. -raw_host(Req) -> - {Req#http_req.raw_host, Req}. - -%% @doc Return the port used for this request. --spec port(#http_req{}) -> {inet:ip_port(), #http_req{}}. -port(Req) -> - {Req#http_req.port, Req}. - -%% @doc Return the path segments for the path requested. -%% -%% Following RFC2396, this function may return path segments containing any -%% character, including / if, and only if, a / was escaped -%% and part of a path segment in the path requested. --spec path(#http_req{}) -> {cowboy_dispatcher:tokens(), #http_req{}}. -path(Req) -> - {Req#http_req.path, Req}. - -%% @doc Return the extra path information obtained from partially matching -%% the patch using '...'. --spec path_info(#http_req{}) - -> {cowboy_dispatcher:tokens() | undefined, #http_req{}}. 
-path_info(Req) -> - {Req#http_req.path_info, Req}. - -%% @doc Return the raw path directly taken from the request. --spec raw_path(#http_req{}) -> {binary(), #http_req{}}. -raw_path(Req) -> - {Req#http_req.raw_path, Req}. - -%% @equiv qs_val(Name, Req, undefined) --spec qs_val(binary(), #http_req{}) - -> {binary() | true | undefined, #http_req{}}. -qs_val(Name, Req) when is_binary(Name) -> - qs_val(Name, Req, undefined). - -%% @doc Return the query string value for the given key, or a default if -%% missing. -qs_val(Name, Req=#http_req{raw_qs=RawQs, qs_vals=undefined, - urldecode={URLDecFun, URLDecArg}}, Default) when is_binary(Name) -> - QsVals = parse_qs(RawQs, fun(Bin) -> URLDecFun(Bin, URLDecArg) end), - qs_val(Name, Req#http_req{qs_vals=QsVals}, Default); -qs_val(Name, Req, Default) -> - case lists:keyfind(Name, 1, Req#http_req.qs_vals) of - {Name, Value} -> {Value, Req}; - false -> {Default, Req} - end. - -%% @doc Return the full list of query string values. --spec qs_vals(#http_req{}) -> {list({binary(), binary() | true}), #http_req{}}. -qs_vals(Req=#http_req{raw_qs=RawQs, qs_vals=undefined, - urldecode={URLDecFun, URLDecArg}}) -> - QsVals = parse_qs(RawQs, fun(Bin) -> URLDecFun(Bin, URLDecArg) end), - qs_vals(Req#http_req{qs_vals=QsVals}); -qs_vals(Req=#http_req{qs_vals=QsVals}) -> - {QsVals, Req}. - -%% @doc Return the raw query string directly taken from the request. --spec raw_qs(#http_req{}) -> {binary(), #http_req{}}. -raw_qs(Req) -> - {Req#http_req.raw_qs, Req}. - -%% @equiv binding(Name, Req, undefined) --spec binding(atom(), #http_req{}) -> {binary() | undefined, #http_req{}}. -binding(Name, Req) when is_atom(Name) -> - binding(Name, Req, undefined). - -%% @doc Return the binding value for the given key obtained when matching -%% the host and path against the dispatch list, or a default if missing. 
-binding(Name, Req, Default) when is_atom(Name) -> - case lists:keyfind(Name, 1, Req#http_req.bindings) of - {Name, Value} -> {Value, Req}; - false -> {Default, Req} - end. - -%% @doc Return the full list of binding values. --spec bindings(#http_req{}) -> {list({atom(), binary()}), #http_req{}}. -bindings(Req) -> - {Req#http_req.bindings, Req}. - -%% @equiv header(Name, Req, undefined) --spec header(atom() | binary(), #http_req{}) - -> {binary() | undefined, #http_req{}}. -header(Name, Req) when is_atom(Name) orelse is_binary(Name) -> - header(Name, Req, undefined). - -%% @doc Return the header value for the given key, or a default if missing. -header(Name, Req, Default) when is_atom(Name) orelse is_binary(Name) -> - case lists:keyfind(Name, 1, Req#http_req.headers) of - {Name, Value} -> {Value, Req}; - false -> {Default, Req} - end. - -%% @doc Return the full list of headers. --spec headers(#http_req{}) -> {cowboy_http:headers(), #http_req{}}. -headers(Req) -> - {Req#http_req.headers, Req}. - -%% @doc Semantically parse headers. -%% -%% When the value isn't found, a proper default value for the type -%% returned is used as a return value. -%% @see parse_header/3 --spec parse_header(cowboy_http:header(), #http_req{}) - -> {any(), #http_req{}} | {error, badarg}. -parse_header(Name, Req=#http_req{p_headers=PHeaders}) -> - case lists:keyfind(Name, 1, PHeaders) of - false -> parse_header(Name, Req, parse_header_default(Name)); - {Name, Value} -> {Value, Req} - end. - -%% @doc Default values for semantic header parsing. --spec parse_header_default(cowboy_http:header()) -> any(). -parse_header_default('Connection') -> []; -parse_header_default(_Name) -> undefined. - -%% @doc Semantically parse headers. -%% -%% When the header is unknown, the value is returned directly without parsing. --spec parse_header(cowboy_http:header(), #http_req{}, any()) - -> {any(), #http_req{}} | {error, badarg}. 
-parse_header(Name, Req, Default) when Name =:= 'Accept' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:list(Value, fun cowboy_http:media_range/2) - end); -parse_header(Name, Req, Default) when Name =:= 'Accept-Charset' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:nonempty_list(Value, fun cowboy_http:conneg/2) - end); -parse_header(Name, Req, Default) when Name =:= 'Accept-Encoding' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:list(Value, fun cowboy_http:conneg/2) - end); -parse_header(Name, Req, Default) when Name =:= 'Accept-Language' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:nonempty_list(Value, fun cowboy_http:language_range/2) - end); -parse_header(Name, Req, Default) when Name =:= 'Connection' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2) - end); -parse_header(Name, Req, Default) when Name =:= 'Content-Length' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:digits(Value) - end); -parse_header(Name, Req, Default) when Name =:= 'Content-Type' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:content_type(Value) - end); -parse_header(Name, Req, Default) - when Name =:= 'If-Match'; Name =:= 'If-None-Match' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:entity_tag_match(Value) - end); -parse_header(Name, Req, Default) - when Name =:= 'If-Modified-Since'; Name =:= 'If-Unmodified-Since' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:http_date(Value) - end); -parse_header(Name, Req, Default) when Name =:= 'Upgrade' -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:nonempty_list(Value, fun cowboy_http:token_ci/2) - end); -parse_header(Name, Req, Default) when Name =:= <<"sec-websocket-protocol">> -> - parse_header(Name, Req, Default, - fun (Value) -> - cowboy_http:nonempty_list(Value, fun 
cowboy_http:token/2) - end); -parse_header(Name, Req, Default) -> - {Value, Req2} = header(Name, Req, Default), - {undefined, Value, Req2}. - -parse_header(Name, Req=#http_req{p_headers=PHeaders}, Default, Fun) -> - case header(Name, Req) of - {undefined, Req2} -> - {Default, Req2#http_req{p_headers=[{Name, Default}|PHeaders]}}; - {Value, Req2} -> - case Fun(Value) of - {error, badarg} -> - {error, badarg}; - P -> - {P, Req2#http_req{p_headers=[{Name, P}|PHeaders]}} - end - end. - -%% @equiv cookie(Name, Req, undefined) --spec cookie(binary(), #http_req{}) - -> {binary() | true | undefined, #http_req{}}. -cookie(Name, Req) when is_binary(Name) -> - cookie(Name, Req, undefined). - -%% @doc Return the cookie value for the given key, or a default if -%% missing. -cookie(Name, Req=#http_req{cookies=undefined}, Default) when is_binary(Name) -> - case header('Cookie', Req) of - {undefined, Req2} -> - {Default, Req2#http_req{cookies=[]}}; - {RawCookie, Req2} -> - Cookies = cowboy_cookies:parse_cookie(RawCookie), - cookie(Name, Req2#http_req{cookies=Cookies}, Default) - end; -cookie(Name, Req, Default) -> - case lists:keyfind(Name, 1, Req#http_req.cookies) of - {Name, Value} -> {Value, Req}; - false -> {Default, Req} - end. - -%% @doc Return the full list of cookie values. --spec cookies(#http_req{}) -> {list({binary(), binary() | true}), #http_req{}}. -cookies(Req=#http_req{cookies=undefined}) -> - case header('Cookie', Req) of - {undefined, Req2} -> - {[], Req2#http_req{cookies=[]}}; - {RawCookie, Req2} -> - Cookies = cowboy_cookies:parse_cookie(RawCookie), - cookies(Req2#http_req{cookies=Cookies}) - end; -cookies(Req=#http_req{cookies=Cookies}) -> - {Cookies, Req}. - -%% @equiv meta(Name, Req, undefined) --spec meta(atom(), #http_req{}) -> {any() | undefined, #http_req{}}. -meta(Name, Req) -> - meta(Name, Req, undefined). - -%% @doc Return metadata information about the request. -%% -%% Metadata information varies from one protocol to another. 
Websockets -%% would define the protocol version here, while REST would use it to -%% indicate which media type, language and charset were retained. --spec meta(atom(), #http_req{}, any()) -> {any(), #http_req{}}. -meta(Name, Req, Default) -> - case lists:keyfind(Name, 1, Req#http_req.meta) of - {Name, Value} -> {Value, Req}; - false -> {Default, Req} - end. - -%% Request Body API. - -%% @doc Return the full body sent with the request, or {error, badarg} -%% if no Content-Length is available. -%% @todo We probably want to allow a max length. -%% @todo Add multipart support to this function. --spec body(#http_req{}) -> {ok, binary(), #http_req{}} | {error, atom()}. -body(Req) -> - {Length, Req2} = cowboy_http_req:parse_header('Content-Length', Req), - case Length of - undefined -> {error, badarg}; - {error, badarg} -> {error, badarg}; - _Any -> - body(Length, Req2) - end. - -%% @doc Return Length bytes of the request body. -%% -%% You probably shouldn't be calling this function directly, as it expects the -%% Length argument to be the full size of the body, and will consider -%% the body to be fully read from the socket. -%% @todo We probably want to configure the timeout. --spec body(non_neg_integer(), #http_req{}) - -> {ok, binary(), #http_req{}} | {error, atom()}. -body(Length, Req=#http_req{body_state=waiting, buffer=Buffer}) - when is_integer(Length) andalso Length =< byte_size(Buffer) -> - << Body:Length/binary, Rest/bits >> = Buffer, - {ok, Body, Req#http_req{body_state=done, buffer=Rest}}; -body(Length, Req=#http_req{socket=Socket, transport=Transport, - body_state=waiting, buffer=Buffer}) -> - case Transport:recv(Socket, Length - byte_size(Buffer), 5000) of - {ok, Body} -> {ok, << Buffer/binary, Body/binary >>, - Req#http_req{body_state=done, buffer= <<>>}}; - {error, Reason} -> {error, Reason} - end. - -%% @doc Return the full body sent with the reqest, parsed as an -%% application/x-www-form-urlencoded string. Essentially a POST query string. 
--spec body_qs(#http_req{}) -> {list({binary(), binary() | true}), #http_req{}}. -body_qs(Req=#http_req{urldecode={URLDecFun, URLDecArg}}) -> - {ok, Body, Req2} = body(Req), - {parse_qs(Body, fun(Bin) -> URLDecFun(Bin, URLDecArg) end), Req2}. - -%% Multipart Request API. - -%% @doc Return data from the multipart parser. -%% -%% Use this function for multipart streaming. For each part in the request, -%% this function returns {headers, Headers} followed by a sequence of -%% {data, Data} tuples and finally end_of_part. When there -%% is no part to parse anymore, eof is returned. -%% -%% If the request Content-Type is not a multipart one, {error, badarg} -%% is returned. --spec multipart_data(#http_req{}) - -> {{headers, cowboy_http:headers()} - | {data, binary()} | end_of_part | eof, - #http_req{}}. -multipart_data(Req=#http_req{body_state=waiting}) -> - {{<<"multipart">>, _SubType, Params}, Req2} = - parse_header('Content-Type', Req), - {_, Boundary} = lists:keyfind(<<"boundary">>, 1, Params), - {Length, Req3=#http_req{buffer=Buffer}} = - parse_header('Content-Length', Req2), - multipart_data(Req3, Length, cowboy_multipart:parser(Boundary), Buffer); -multipart_data(Req=#http_req{body_state={multipart, Length, Cont}}) -> - multipart_data(Req, Length, Cont()); -multipart_data(Req=#http_req{body_state=done}) -> - {eof, Req}. - -multipart_data(Req, Length, Parser, Buffer) when byte_size(Buffer) >= Length -> - << Data:Length/binary, Rest/binary >> = Buffer, - multipart_data(Req#http_req{buffer=Rest}, 0, Parser(Data)); -multipart_data(Req, Length, Parser, Buffer) -> - NewLength = Length - byte_size(Buffer), - multipart_data(Req#http_req{buffer= <<>>}, NewLength, Parser(Buffer)). 
- -multipart_data(Req, Length, {headers, Headers, Cont}) -> - {{headers, Headers}, Req#http_req{body_state={multipart, Length, Cont}}}; -multipart_data(Req, Length, {body, Data, Cont}) -> - {{body, Data}, Req#http_req{body_state={multipart, Length, Cont}}}; -multipart_data(Req, Length, {end_of_part, Cont}) -> - {end_of_part, Req#http_req{body_state={multipart, Length, Cont}}}; -multipart_data(Req, 0, eof) -> - {eof, Req#http_req{body_state=done}}; -multipart_data(Req=#http_req{socket=Socket, transport=Transport}, - Length, eof) -> - {ok, _Data} = Transport:recv(Socket, Length, 5000), - {eof, Req#http_req{body_state=done}}; -multipart_data(Req=#http_req{socket=Socket, transport=Transport}, - Length, {more, Parser}) when Length > 0 -> - case Transport:recv(Socket, 0, 5000) of - {ok, << Data:Length/binary, Buffer/binary >>} -> - multipart_data(Req#http_req{buffer=Buffer}, 0, Parser(Data)); - {ok, Data} -> - multipart_data(Req, Length - byte_size(Data), Parser(Data)) - end. - -%% @doc Skip a part returned by the multipart parser. -%% -%% This function repeatedly calls multipart_data/1 until -%% end_of_part or eof is parsed. -multipart_skip(Req) -> - case multipart_data(Req) of - {end_of_part, Req2} -> {ok, Req2}; - {eof, Req2} -> {ok, Req2}; - {_Other, Req2} -> multipart_skip(Req2) - end. - -%% Response API. - -%% @doc Add a cookie header to the response. --spec set_resp_cookie(binary(), binary(), [cowboy_cookies:cookie_option()], - #http_req{}) -> {ok, #http_req{}}. -set_resp_cookie(Name, Value, Options, Req) -> - {HeaderName, HeaderValue} = cowboy_cookies:cookie(Name, Value, Options), - set_resp_header(HeaderName, HeaderValue, Req). - -%% @doc Add a header to the response. -set_resp_header(Name, Value, Req=#http_req{resp_headers=RespHeaders}) -> - NameBin = header_to_binary(Name), - {ok, Req#http_req{resp_headers=[{NameBin, Value}|RespHeaders]}}. - -%% @doc Add a body to the response. 
-%% -%% The body set here is ignored if the response is later sent using -%% anything other than reply/2 or reply/3. The response body is expected -%% to be a binary or an iolist. -set_resp_body(Body, Req) -> - {ok, Req#http_req{resp_body=Body}}. - - -%% @doc Add a body function to the response. -%% -%% The response body may also be set to a content-length - stream-function pair. -%% If the response body is of this type normal response headers will be sent. -%% After the response headers has been sent the body function is applied. -%% The body function is expected to write the response body directly to the -%% socket using the transport module. -%% -%% If the body function crashes while writing the response body or writes fewer -%% bytes than declared the behaviour is undefined. The body set here is ignored -%% if the response is later sent using anything other than `reply/2' or -%% `reply/3'. -%% -%% @see cowboy_http_req:transport/1. --spec set_resp_body_fun(non_neg_integer(), fun(() -> {sent, non_neg_integer()}), - #http_req{}) -> {ok, #http_req{}}. -set_resp_body_fun(StreamLen, StreamFun, Req) -> - {ok, Req#http_req{resp_body={StreamLen, StreamFun}}}. - - -%% @doc Return whether the given header has been set for the response. -has_resp_header(Name, #http_req{resp_headers=RespHeaders}) -> - NameBin = header_to_binary(Name), - lists:keymember(NameBin, 1, RespHeaders). - -%% @doc Return whether a body has been set for the response. -has_resp_body(#http_req{resp_body={Length, _}}) -> - Length > 0; -has_resp_body(#http_req{resp_body=RespBody}) -> - iolist_size(RespBody) > 0. - -%% @equiv reply(Status, [], [], Req) --spec reply(cowboy_http:status(), #http_req{}) -> {ok, #http_req{}}. -reply(Status, Req=#http_req{resp_body=Body}) -> - reply(Status, [], Body, Req). - -%% @equiv reply(Status, Headers, [], Req) --spec reply(cowboy_http:status(), cowboy_http:headers(), #http_req{}) - -> {ok, #http_req{}}. 
-reply(Status, Headers, Req=#http_req{resp_body=Body}) -> - reply(Status, Headers, Body, Req). - -%% @doc Send a reply to the client. -reply(Status, Headers, Body, Req=#http_req{socket=Socket, - transport=Transport, connection=Connection, pid=ReqPid, - method=Method, resp_state=waiting, resp_headers=RespHeaders}) -> - RespConn = response_connection(Headers, Connection), - ContentLen = case Body of {CL, _} -> CL; _ -> iolist_size(Body) end, - Head = response_head(Status, Headers, RespHeaders, [ - {<<"Connection">>, atom_to_connection(Connection)}, - {<<"Content-Length">>, integer_to_list(ContentLen)}, - {<<"Date">>, cowboy_clock:rfc1123()}, - {<<"Server">>, <<"Cowboy">>} - ]), - case {Method, Body} of - {'HEAD', _} -> Transport:send(Socket, Head); - {_, {_, StreamFun}} -> Transport:send(Socket, Head), StreamFun(); - {_, _} -> Transport:send(Socket, [Head, Body]) - end, - ReqPid ! {?MODULE, resp_sent}, - {ok, Req#http_req{connection=RespConn, resp_state=done, - resp_headers=[], resp_body= <<>>}}. - -%% @equiv chunked_reply(Status, [], Req) --spec chunked_reply(cowboy_http:status(), #http_req{}) -> {ok, #http_req{}}. -chunked_reply(Status, Req) -> - chunked_reply(Status, [], Req). - -%% @doc Initiate the sending of a chunked reply to the client. -%% @see cowboy_http_req:chunk/2 --spec chunked_reply(cowboy_http:status(), cowboy_http:headers(), #http_req{}) - -> {ok, #http_req{}}. -chunked_reply(Status, Headers, Req=#http_req{socket=Socket, - transport=Transport, connection=Connection, pid=ReqPid, - resp_state=waiting, resp_headers=RespHeaders}) -> - RespConn = response_connection(Headers, Connection), - Head = response_head(Status, Headers, RespHeaders, [ - {<<"Connection">>, atom_to_connection(Connection)}, - {<<"Transfer-Encoding">>, <<"chunked">>}, - {<<"Date">>, cowboy_clock:rfc1123()}, - {<<"Server">>, <<"Cowboy">>} - ]), - Transport:send(Socket, Head), - ReqPid ! 
{?MODULE, resp_sent}, - {ok, Req#http_req{connection=RespConn, resp_state=chunks, - resp_headers=[], resp_body= <<>>}}. - -%% @doc Send a chunk of data. -%% -%% A chunked reply must have been initiated before calling this function. -chunk(_Data, #http_req{socket=_Socket, transport=_Transport, method='HEAD'}) -> - ok; -chunk(Data, #http_req{socket=Socket, transport=Transport, resp_state=chunks}) -> - Transport:send(Socket, [erlang:integer_to_list(iolist_size(Data), 16), - <<"\r\n">>, Data, <<"\r\n">>]). - -%% @doc Send an upgrade reply. -%% @private --spec upgrade_reply(cowboy_http:status(), cowboy_http:headers(), #http_req{}) - -> {ok, #http_req{}}. -upgrade_reply(Status, Headers, Req=#http_req{socket=Socket, transport=Transport, - pid=ReqPid, resp_state=waiting, resp_headers=RespHeaders}) -> - Head = response_head(Status, Headers, RespHeaders, [ - {<<"Connection">>, <<"Upgrade">>} - ]), - Transport:send(Socket, Head), - ReqPid ! {?MODULE, resp_sent}, - {ok, Req#http_req{resp_state=done, resp_headers=[], resp_body= <<>>}}. - -%% Misc API. - -%% @doc Compact the request data by removing all non-system information. -%% -%% This essentially removes the host, path, query string, bindings and headers. -%% Use it when you really need to save up memory, for example when having -%% many concurrent long-running connections. --spec compact(#http_req{}) -> #http_req{}. -compact(Req) -> - Req#http_req{host=undefined, host_info=undefined, path=undefined, - path_info=undefined, qs_vals=undefined, - bindings=undefined, headers=[], - p_headers=[], cookies=[]}. - -%% @doc Return the transport module and socket associated with a request. -%% -%% This exposes the same socket interface used internally by the HTTP protocol -%% implementation to developers that needs low level access to the socket. -%% -%% It is preferred to use this in conjuction with the stream function support -%% in `set_resp_body_fun/3' if this is used to write a response body directly -%% to the socket. 
This ensures that the response headers are set correctly. --spec transport(#http_req{}) -> {ok, module(), inet:socket()}. -transport(#http_req{transport=Transport, socket=Socket}) -> - {ok, Transport, Socket}. - -%% Internal. - --spec parse_qs(binary(), fun((binary()) -> binary())) -> - list({binary(), binary() | true}). -parse_qs(<<>>, _URLDecode) -> - []; -parse_qs(Qs, URLDecode) -> - Tokens = binary:split(Qs, <<"&">>, [global, trim]), - [case binary:split(Token, <<"=">>) of - [Token] -> {URLDecode(Token), true}; - [Name, Value] -> {URLDecode(Name), URLDecode(Value)} - end || Token <- Tokens]. - --spec response_connection(cowboy_http:headers(), keepalive | close) - -> keepalive | close. -response_connection([], Connection) -> - Connection; -response_connection([{Name, Value}|Tail], Connection) -> - case Name of - 'Connection' -> response_connection_parse(Value); - Name when is_atom(Name) -> response_connection(Tail, Connection); - Name -> - Name2 = cowboy_bstr:to_lower(Name), - case Name2 of - <<"connection">> -> response_connection_parse(Value); - _Any -> response_connection(Tail, Connection) - end - end. - --spec response_connection_parse(binary()) -> keepalive | close. -response_connection_parse(ReplyConn) -> - Tokens = cowboy_http:nonempty_list(ReplyConn, fun cowboy_http:token/2), - cowboy_http:connection_to_atom(Tokens). - --spec response_head(cowboy_http:status(), cowboy_http:headers(), - cowboy_http:headers(), cowboy_http:headers()) -> iolist(). -response_head(Status, Headers, RespHeaders, DefaultHeaders) -> - StatusLine = <<"HTTP/1.1 ", (status(Status))/binary, "\r\n">>, - Headers2 = [{header_to_binary(Key), Value} || {Key, Value} <- Headers], - Headers3 = merge_headers( - merge_headers(Headers2, RespHeaders), - DefaultHeaders), - Headers4 = [[Key, <<": ">>, Value, <<"\r\n">>] - || {Key, Value} <- Headers3], - [StatusLine, Headers4, <<"\r\n">>]. - --spec merge_headers(cowboy_http:headers(), cowboy_http:headers()) - -> cowboy_http:headers(). 
-merge_headers(Headers, []) -> - Headers; -merge_headers(Headers, [{Name, Value}|Tail]) -> - Headers2 = case lists:keymember(Name, 1, Headers) of - true -> Headers; - false -> Headers ++ [{Name, Value}] - end, - merge_headers(Headers2, Tail). - --spec atom_to_connection(keepalive) -> <<_:80>>; - (close) -> <<_:40>>. -atom_to_connection(keepalive) -> - <<"keep-alive">>; -atom_to_connection(close) -> - <<"close">>. - --spec status(cowboy_http:status()) -> binary(). -status(100) -> <<"100 Continue">>; -status(101) -> <<"101 Switching Protocols">>; -status(102) -> <<"102 Processing">>; -status(200) -> <<"200 OK">>; -status(201) -> <<"201 Created">>; -status(202) -> <<"202 Accepted">>; -status(203) -> <<"203 Non-Authoritative Information">>; -status(204) -> <<"204 No Content">>; -status(205) -> <<"205 Reset Content">>; -status(206) -> <<"206 Partial Content">>; -status(207) -> <<"207 Multi-Status">>; -status(226) -> <<"226 IM Used">>; -status(300) -> <<"300 Multiple Choices">>; -status(301) -> <<"301 Moved Permanently">>; -status(302) -> <<"302 Found">>; -status(303) -> <<"303 See Other">>; -status(304) -> <<"304 Not Modified">>; -status(305) -> <<"305 Use Proxy">>; -status(306) -> <<"306 Switch Proxy">>; -status(307) -> <<"307 Temporary Redirect">>; -status(400) -> <<"400 Bad Request">>; -status(401) -> <<"401 Unauthorized">>; -status(402) -> <<"402 Payment Required">>; -status(403) -> <<"403 Forbidden">>; -status(404) -> <<"404 Not Found">>; -status(405) -> <<"405 Method Not Allowed">>; -status(406) -> <<"406 Not Acceptable">>; -status(407) -> <<"407 Proxy Authentication Required">>; -status(408) -> <<"408 Request Timeout">>; -status(409) -> <<"409 Conflict">>; -status(410) -> <<"410 Gone">>; -status(411) -> <<"411 Length Required">>; -status(412) -> <<"412 Precondition Failed">>; -status(413) -> <<"413 Request Entity Too Large">>; -status(414) -> <<"414 Request-URI Too Long">>; -status(415) -> <<"415 Unsupported Media Type">>; -status(416) -> <<"416 Requested Range 
Not Satisfiable">>; -status(417) -> <<"417 Expectation Failed">>; -status(418) -> <<"418 I'm a teapot">>; -status(422) -> <<"422 Unprocessable Entity">>; -status(423) -> <<"423 Locked">>; -status(424) -> <<"424 Failed Dependency">>; -status(425) -> <<"425 Unordered Collection">>; -status(426) -> <<"426 Upgrade Required">>; -status(500) -> <<"500 Internal Server Error">>; -status(501) -> <<"501 Not Implemented">>; -status(502) -> <<"502 Bad Gateway">>; -status(503) -> <<"503 Service Unavailable">>; -status(504) -> <<"504 Gateway Timeout">>; -status(505) -> <<"505 HTTP Version Not Supported">>; -status(506) -> <<"506 Variant Also Negotiates">>; -status(507) -> <<"507 Insufficient Storage">>; -status(510) -> <<"510 Not Extended">>; -status(B) when is_binary(B) -> B. - --spec header_to_binary(cowboy_http:header()) -> binary(). -header_to_binary('Cache-Control') -> <<"Cache-Control">>; -header_to_binary('Connection') -> <<"Connection">>; -header_to_binary('Date') -> <<"Date">>; -header_to_binary('Pragma') -> <<"Pragma">>; -header_to_binary('Transfer-Encoding') -> <<"Transfer-Encoding">>; -header_to_binary('Upgrade') -> <<"Upgrade">>; -header_to_binary('Via') -> <<"Via">>; -header_to_binary('Accept') -> <<"Accept">>; -header_to_binary('Accept-Charset') -> <<"Accept-Charset">>; -header_to_binary('Accept-Encoding') -> <<"Accept-Encoding">>; -header_to_binary('Accept-Language') -> <<"Accept-Language">>; -header_to_binary('Authorization') -> <<"Authorization">>; -header_to_binary('From') -> <<"From">>; -header_to_binary('Host') -> <<"Host">>; -header_to_binary('If-Modified-Since') -> <<"If-Modified-Since">>; -header_to_binary('If-Match') -> <<"If-Match">>; -header_to_binary('If-None-Match') -> <<"If-None-Match">>; -header_to_binary('If-Range') -> <<"If-Range">>; -header_to_binary('If-Unmodified-Since') -> <<"If-Unmodified-Since">>; -header_to_binary('Max-Forwards') -> <<"Max-Forwards">>; -header_to_binary('Proxy-Authorization') -> <<"Proxy-Authorization">>; 
-header_to_binary('Range') -> <<"Range">>; -header_to_binary('Referer') -> <<"Referer">>; -header_to_binary('User-Agent') -> <<"User-Agent">>; -header_to_binary('Age') -> <<"Age">>; -header_to_binary('Location') -> <<"Location">>; -header_to_binary('Proxy-Authenticate') -> <<"Proxy-Authenticate">>; -header_to_binary('Public') -> <<"Public">>; -header_to_binary('Retry-After') -> <<"Retry-After">>; -header_to_binary('Server') -> <<"Server">>; -header_to_binary('Vary') -> <<"Vary">>; -header_to_binary('Warning') -> <<"Warning">>; -header_to_binary('Www-Authenticate') -> <<"Www-Authenticate">>; -header_to_binary('Allow') -> <<"Allow">>; -header_to_binary('Content-Base') -> <<"Content-Base">>; -header_to_binary('Content-Encoding') -> <<"Content-Encoding">>; -header_to_binary('Content-Language') -> <<"Content-Language">>; -header_to_binary('Content-Length') -> <<"Content-Length">>; -header_to_binary('Content-Location') -> <<"Content-Location">>; -header_to_binary('Content-Md5') -> <<"Content-Md5">>; -header_to_binary('Content-Range') -> <<"Content-Range">>; -header_to_binary('Content-Type') -> <<"Content-Type">>; -header_to_binary('Etag') -> <<"Etag">>; -header_to_binary('Expires') -> <<"Expires">>; -header_to_binary('Last-Modified') -> <<"Last-Modified">>; -header_to_binary('Accept-Ranges') -> <<"Accept-Ranges">>; -header_to_binary('Set-Cookie') -> <<"Set-Cookie">>; -header_to_binary('Set-Cookie2') -> <<"Set-Cookie2">>; -header_to_binary('X-Forwarded-For') -> <<"X-Forwarded-For">>; -header_to_binary('Cookie') -> <<"Cookie">>; -header_to_binary('Keep-Alive') -> <<"Keep-Alive">>; -header_to_binary('Proxy-Connection') -> <<"Proxy-Connection">>; -header_to_binary(B) when is_binary(B) -> B. - -%% Tests. - --ifdef(TEST). 
- -parse_qs_test_() -> - %% {Qs, Result} - Tests = [ - {<<"">>, []}, - {<<"a=b">>, [{<<"a">>, <<"b">>}]}, - {<<"aaa=bbb">>, [{<<"aaa">>, <<"bbb">>}]}, - {<<"a&b">>, [{<<"a">>, true}, {<<"b">>, true}]}, - {<<"a=b&c&d=e">>, [{<<"a">>, <<"b">>}, - {<<"c">>, true}, {<<"d">>, <<"e">>}]}, - {<<"a=b=c=d=e&f=g">>, [{<<"a">>, <<"b=c=d=e">>}, {<<"f">>, <<"g">>}]}, - {<<"a+b=c+d">>, [{<<"a b">>, <<"c d">>}]} - ], - URLDecode = fun cowboy_http:urldecode/1, - [{Qs, fun() -> R = parse_qs(Qs, URLDecode) end} || {Qs, R} <- Tests]. - --endif. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_static.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_static.erl deleted file mode 100644 index da3bd33..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_static.erl +++ /dev/null @@ -1,456 +0,0 @@ -%% Copyright (c) 2011, Magnus Klaar -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc Static resource handler. -%% -%% This built in HTTP handler provides a simple file serving capability for -%% cowboy applications. It should be considered an experimental feature because -%% of it's dependency on the experimental REST handler. 
It's recommended to be -%% used for small or temporary environments where it is not preferrable to set -%% up a second server just to serve files. -%% -%% If this handler is used the Erlang node running the cowboy application must -%% be configured to use an async thread pool. This is configured by adding the -%% `+A $POOL_SIZE' argument to the `erl' command used to start the node. See -%%
-%% this reply from the OTP team to erlang-bugs -%% -%% == Base configuration == -%% -%% The handler must be configured with a request path prefix to serve files -%% under and the path to a directory to read files from. The request path prefix -%% is defined in the path pattern of the cowboy dispatch rule for the handler. -%% The request path pattern must end with a ``'...''' token. -%% The directory path can be set to either an absolute or relative path in the -%% form of a list or binary string representation of a file system path. A list -%% of binary path segments, as is used throughout cowboy, is also a valid -%% directory path. -%% -%% The directory path can also be set to a relative path within the `priv/' -%% directory of an application. This is configured by setting the value of the -%% directory option to a tuple of the form `{priv_dir, Application, Relpath}'. -%% -%% ==== Examples ==== -%% ``` -%% %% Serve files from /var/www/ under http://example.com/static/ -%% {[<<"static">>, '...'], cowboy_http_static, -%% [{directory, "/var/www"}]} -%% -%% %% Serve files from the current working directory under http://example.com/static/ -%% {[<<"static">>, '...'], cowboy_http_static, -%% [{directory, <<"./">>}]} -%% -%% %% Serve files from cowboy/priv/www under http://example.com/ -%% {['...'], cowboy_http_static, -%% [{directory, {priv_dir, cowboy, [<<"www">>]}}]} -%% ''' -%% -%% == Content type configuration == -%% -%% By default the content type of all static resources will be set to -%% `application/octet-stream'. This can be overriden by supplying a list -%% of filename extension to mimetypes pairs in the `mimetypes' option. -%% The filename extension should be a binary string including the leading dot. -%% The mimetypes must be of a type that the `cowboy_http_rest' protocol can -%% handle. 
-%% -%% The spawngrid/mimetypes -%% application, or an arbitrary function accepting the path to the file being -%% served, can also be used to generate the list of content types for a static -%% file resource. The function used must accept an additional argument after -%% the file path argument. -%% -%% ==== Example ==== -%% ``` -%% %% Use a static list of content types. -%% {[<<"static">>, '...'], cowboy_http_static, -%% [{directory, {priv_dir, cowboy, []}}, -%% {mimetypes, [ -%% {<<".css">>, [<<"text/css">>]}, -%% {<<".js">>, [<<"application/javascript">>]}]}]} -%% -%% %% Use the default database in the mimetypes application. -%% {[<<"static">>, '...', cowboy_http_static, -%% [{directory, {priv_dir, cowboy, []}}, -%% {mimetypes, {fun mimetypes:path_to_mimes/2, default}}]]} -%% ''' -%% -%% == ETag Header Function == -%% -%% The default behaviour of the static file handler is to not generate ETag -%% headers. This is because generating ETag headers based on file metadata -%% causes different servers in a cluster to generate different ETag values for -%% the same file unless the metadata is also synced. Generating strong ETags -%% based on the contents of a file is currently out of scope for this module. -%% -%% The default behaviour can be overridden to generate an ETag header based on -%% a combination of the file path, file size, inode and mtime values. If the -%% option value is a list of attribute names tagged with `attributes' a hex -%% encoded CRC32 checksum of the attribute values are used as the ETag header -%% value. -%% -%% If a strong ETag is required a user defined function for generating the -%% header value can be supplied. The function must accept a proplist of the -%% file attributes as the first argument and a second argument containing any -%% additional data that the function requires. The function must return a -%% `binary()' or `undefined'. -%% -%% ==== Examples ==== -%% ``` -%% %% A value of default is equal to not specifying the option. 
-%% {[<<"static">>, '...', cowboy_http_static, -%% [{directory, {priv_dir, cowboy, []}}, -%% {etag, default}]]} -%% -%% %% Use all avaliable ETag function arguments to generate a header value. -%% {[<<"static">>, '...', cowboy_http_static, -%% [{directory, {priv_dir, cowboy, []}}, -%% {etag, {attributes, [filepath, filesize, inode, mtime]}}]]} -%% -%% %% Use a user defined function to generate a strong ETag header value. -%% {[<<"static">>, '...', cowboy_http_static, -%% [{directory, {priv_dir, cowboy, []}}, -%% {etag, {fun generate_strong_etag/2, strong_etag_extra}}]]} -%% -%% generate_strong_etag(Arguments, strong_etag_extra) -> -%% {_, Filepath} = lists:keyfind(filepath, 1, Arguments), -%% {_, _Filesize} = lists:keyfind(filesize, 1, Arguments), -%% {_, _INode} = lists:keyfind(inode, 1, Arguments), -%% {_, _Modified} = lists:keyfind(mtime, 1, Arguments), -%% ChecksumCommand = lists:flatten(io_lib:format("sha1sum ~s", [Filepath])), -%% [Checksum|_] = string:tokens(os:cmd(ChecksumCommand), " "), -%% iolist_to_binary(Checksum). -%% ''' --module(cowboy_http_static). - -%% include files --include("http.hrl"). --include_lib("kernel/include/file.hrl"). - -%% cowboy_http_protocol callbacks --export([init/3]). - -%% cowboy_http_rest callbacks --export([rest_init/2, allowed_methods/2, malformed_request/2, - resource_exists/2, forbidden/2, last_modified/2, generate_etag/2, - content_types_provided/2, file_contents/2]). - -%% internal --export([path_to_mimetypes/2]). - -%% types --type dirpath() :: string() | binary() | [binary()]. --type dirspec() :: dirpath() | {priv, atom(), dirpath()}. --type mimedef() :: {binary(), binary(), [{binary(), binary()}]}. --type etagarg() :: {filepath, binary()} | {mtime, calendar:datetime()} - | {inode, non_neg_integer()} | {filesize, non_neg_integer()}. 
- -%% handler state --record(state, { - filepath :: binary() | error, - fileinfo :: {ok, #file_info{}} | {error, _} | error, - mimetypes :: {fun((binary(), T) -> [mimedef()]), T} | undefined, - etag_fun :: {fun(([etagarg()], T) -> undefined | binary()), T}}). - - -%% @private Upgrade from HTTP handler to REST handler. -init({_Transport, http}, _Req, _Opts) -> - {upgrade, protocol, cowboy_http_rest}. - - -%% @private Set up initial state of REST handler. --spec rest_init(#http_req{}, list()) -> {ok, #http_req{}, #state{}}. -rest_init(Req, Opts) -> - Directory = proplists:get_value(directory, Opts), - Directory1 = directory_path(Directory), - Mimetypes = proplists:get_value(mimetypes, Opts, []), - Mimetypes1 = case Mimetypes of - {_, _} -> Mimetypes; - [] -> {fun path_to_mimetypes/2, []}; - [_|_] -> {fun path_to_mimetypes/2, Mimetypes} - end, - ETagFunction = case proplists:get_value(etag, Opts) of - default -> {fun no_etag_function/2, undefined}; - undefined -> {fun no_etag_function/2, undefined}; - {attributes, Attrs} -> {fun attr_etag_function/2, Attrs}; - {_, _}=EtagFunction1 -> EtagFunction1 - end, - {Filepath, Req1} = cowboy_http_req:path_info(Req), - State = case check_path(Filepath) of - error -> - #state{filepath=error, fileinfo=error, mimetypes=undefined, - etag_fun=ETagFunction}; - ok -> - Filepath1 = join_paths(Directory1, Filepath), - Fileinfo = file:read_file_info(Filepath1), - #state{filepath=Filepath1, fileinfo=Fileinfo, mimetypes=Mimetypes1, - etag_fun=ETagFunction} - end, - {ok, Req1, State}. - - -%% @private Only allow GET and HEAD requests on files. --spec allowed_methods(#http_req{}, #state{}) -> - {[atom()], #http_req{}, #state{}}. -allowed_methods(Req, State) -> - {['GET', 'HEAD'], Req, State}. - -%% @private -malformed_request(Req, #state{filepath=error}=State) -> - {true, Req, State}; -malformed_request(Req, State) -> - {false, Req, State}. - - -%% @private Check if the resource exists under the document root. 
-resource_exists(Req, #state{fileinfo={error, _}}=State) -> - {false, Req, State}; -resource_exists(Req, #state{fileinfo={ok, Fileinfo}}=State) -> - {Fileinfo#file_info.type =:= regular, Req, State}. - - -%% @private -%% Access to a file resource is forbidden if it exists and the local node does -%% not have permission to read it. Directory listings are always forbidden. -forbidden(Req, #state{fileinfo={_, #file_info{type=directory}}}=State) -> - {true, Req, State}; -forbidden(Req, #state{fileinfo={error, eacces}}=State) -> - {true, Req, State}; -forbidden(Req, #state{fileinfo={error, _}}=State) -> - {false, Req, State}; -forbidden(Req, #state{fileinfo={ok, #file_info{access=Access}}}=State) -> - {not (Access =:= read orelse Access =:= read_write), Req, State}. - - -%% @private Read the time a file system system object was last modified. --spec last_modified(#http_req{}, #state{}) -> - {calendar:datetime(), #http_req{}, #state{}}. -last_modified(Req, #state{fileinfo={ok, #file_info{mtime=Modified}}}=State) -> - {Modified, Req, State}. - - -%% @private Generate the ETag header value for this file. -%% The ETag header value is only generated if the resource is a file that -%% exists in document root. --spec generate_etag(#http_req{}, #state{}) -> - {undefined | binary(), #http_req{}, #state{}}. -generate_etag(Req, #state{fileinfo={_, #file_info{type=regular, inode=INode, - mtime=Modified, size=Filesize}}, filepath=Filepath, - etag_fun={ETagFun, ETagData}}=State) -> - ETagArgs = [ - {filepath, Filepath}, {filesize, Filesize}, - {inode, INode}, {mtime, Modified}], - {ETagFun(ETagArgs, ETagData), Req, State}; -generate_etag(Req, State) -> - {undefined, Req, State}. - - -%% @private Return the content type of a file. --spec content_types_provided(#http_req{}, #state{}) -> tuple(). 
-content_types_provided(Req, #state{filepath=Filepath, - mimetypes={MimetypesFun, MimetypesData}}=State) -> - Mimetypes = [{T, file_contents} - || T <- MimetypesFun(Filepath, MimetypesData)], - {Mimetypes, Req, State}. - - -%% @private Return a function that writes a file directly to the socket. --spec file_contents(#http_req{}, #state{}) -> tuple(). -file_contents(Req, #state{filepath=Filepath, - fileinfo={ok, #file_info{size=Filesize}}}=State) -> - {ok, Transport, Socket} = cowboy_http_req:transport(Req), - Writefile = content_function(Transport, Socket, Filepath), - {{stream, Filesize, Writefile}, Req, State}. - - -%% @private Return a function writing the contents of a file to a socket. -%% The function returns the number of bytes written to the socket to enable -%% the calling function to determine if the expected number of bytes were -%% written to the socket. --spec content_function(module(), inet:socket(), binary()) -> - fun(() -> {sent, non_neg_integer()}). -content_function(Transport, Socket, Filepath) -> - %% `file:sendfile/2' will only work with the `cowboy_tcp_transport' - %% transport module. SSL or future SPDY transports that require the - %% content to be encrypted or framed as the content is sent. - case erlang:function_exported(file, sendfile, 2) of - false -> - fun() -> sfallback(Transport, Socket, Filepath) end; - _ when Transport =/= cowboy_tcp_transport -> - fun() -> sfallback(Transport, Socket, Filepath) end; - true -> - fun() -> sendfile(Socket, Filepath) end - end. - - -%% @private Sendfile fallback function. --spec sfallback(module(), inet:socket(), binary()) -> {sent, non_neg_integer()}. -sfallback(Transport, Socket, Filepath) -> - {ok, File} = file:open(Filepath, [read,binary,raw]), - sfallback(Transport, Socket, File, 0). - --spec sfallback(module(), inet:socket(), file:io_device(), - non_neg_integer()) -> {sent, non_neg_integer()}. 
-sfallback(Transport, Socket, File, Sent) -> - case file:read(File, 16#1FFF) of - eof -> - ok = file:close(File), - {sent, Sent}; - {ok, Bin} -> - ok = Transport:send(Socket, Bin), - sfallback(Transport, Socket, File, Sent + byte_size(Bin)) - end. - - -%% @private Wrapper for sendfile function. --spec sendfile(inet:socket(), binary()) -> {sent, non_neg_integer()}. -sendfile(Socket, Filepath) -> - {ok, Sent} = file:sendfile(Filepath, Socket), - {sent, Sent}. - --spec directory_path(dirspec()) -> dirpath(). -directory_path({priv_dir, App, []}) -> - priv_dir_path(App); -directory_path({priv_dir, App, [H|_]=Path}) when is_integer(H) -> - filename:join(priv_dir_path(App), Path); -directory_path({priv_dir, App, [H|_]=Path}) when is_binary(H) -> - filename:join(filename:split(priv_dir_path(App)) ++ Path); -directory_path({priv_dir, App, Path}) when is_binary(Path) -> - filename:join(priv_dir_path(App), Path); -directory_path(Path) -> - Path. - - -%% @private Validate a request path for unsafe characters. -%% There is no way to escape special characters in a filesystem path. --spec check_path(Path::[binary()]) -> ok | error. -check_path([]) -> ok; -check_path([<<"">>|_T]) -> error; -check_path([<<".">>|_T]) -> error; -check_path([<<"..">>|_T]) -> error; -check_path([H|T]) -> - case binary:match(H, <<"/">>) of - {_, _} -> error; - nomatch -> check_path(T) - end. - - -%% @private Join the the directory and request paths. --spec join_paths(dirpath(), [binary()]) -> binary(). -join_paths([H|_]=Dirpath, Filepath) when is_integer(H) -> - filename:join(filename:split(Dirpath) ++ Filepath); -join_paths([H|_]=Dirpath, Filepath) when is_binary(H) -> - filename:join(Dirpath ++ Filepath); -join_paths(Dirpath, Filepath) when is_binary(Dirpath) -> - filename:join([Dirpath] ++ Filepath); -join_paths([], Filepath) -> - filename:join(Filepath). - - -%% @private Return the path to the priv/ directory of an application. --spec priv_dir_path(atom()) -> string(). 
-priv_dir_path(App) -> - case code:priv_dir(App) of - {error, bad_name} -> priv_dir_mod(App); - Dir -> Dir - end. - --spec priv_dir_mod(atom()) -> string(). -priv_dir_mod(Mod) -> - case code:which(Mod) of - File when not is_list(File) -> "../priv"; - File -> filename:join([filename:dirname(File),"../priv"]) - end. - - -%% @private Use application/octet-stream as the default mimetype. -%% If a list of extension - mimetype pairs are provided as the mimetypes -%% an attempt to find the mimetype using the file extension. If no match -%% is found the default mimetype is returned. --spec path_to_mimetypes(binary(), [{binary(), [mimedef()]}]) -> - [mimedef()]. -path_to_mimetypes(Filepath, Extensions) when is_binary(Filepath) -> - Ext = filename:extension(Filepath), - case Ext of - <<>> -> default_mimetype(); - _Ext -> path_to_mimetypes_(Ext, Extensions) - end. - --spec path_to_mimetypes_(binary(), [{binary(), [mimedef()]}]) -> [mimedef()]. -path_to_mimetypes_(Ext, Extensions) -> - case lists:keyfind(Ext, 1, Extensions) of - {_, MTs} -> MTs; - _Unknown -> default_mimetype() - end. - --spec default_mimetype() -> [mimedef()]. -default_mimetype() -> - [{<<"application">>, <<"octet-stream">>, []}]. - - -%% @private Do not send ETag headers in the default configuration. --spec no_etag_function([etagarg()], undefined) -> undefined. -no_etag_function(_Args, undefined) -> - undefined. - -%% @private A simple alternative is to send an ETag based on file attributes. --type fileattr() :: filepath | filesize | mtime | inode. --spec attr_etag_function([etagarg()], [fileattr()]) -> binary(). -attr_etag_function(Args, Attrs) -> - attr_etag_function(Args, Attrs, []). - --spec attr_etag_function([etagarg()], [fileattr()], [binary()]) -> binary(). -attr_etag_function(_Args, [], Acc) -> - list_to_binary(erlang:integer_to_list(erlang:crc32(Acc), 16)); -attr_etag_function(Args, [H|T], Acc) -> - {_, Value} = lists:keyfind(H, 1, Args), - attr_etag_function(Args, T, [term_to_binary(Value)|Acc]). 
- - --ifdef(TEST). --include_lib("eunit/include/eunit.hrl"). --define(_eq(E, I), ?_assertEqual(E, I)). - -check_path_test_() -> - C = fun check_path/1, - [?_eq(error, C([<<>>])), - ?_eq(ok, C([<<"abc">>])), - ?_eq(error, C([<<".">>])), - ?_eq(error, C([<<"..">>])), - ?_eq(error, C([<<"/">>])) - ]. - -join_paths_test_() -> - P = fun join_paths/2, - [?_eq(<<"a">>, P([], [<<"a">>])), - ?_eq(<<"a/b/c">>, P(<<"a/b">>, [<<"c">>])), - ?_eq(<<"a/b/c">>, P("a/b", [<<"c">>])), - ?_eq(<<"a/b/c">>, P([<<"a">>, <<"b">>], [<<"c">>])) - ]. - -directory_path_test_() -> - P = fun directory_path/1, - PL = fun(I) -> length(filename:split(P(I))) end, - Base = PL({priv_dir, cowboy, []}), - [?_eq(Base + 1, PL({priv_dir, cowboy, "a"})), - ?_eq(Base + 1, PL({priv_dir, cowboy, <<"a">>})), - ?_eq(Base + 1, PL({priv_dir, cowboy, [<<"a">>]})), - ?_eq(Base + 2, PL({priv_dir, cowboy, "a/b"})), - ?_eq(Base + 2, PL({priv_dir, cowboy, <<"a/b">>})), - ?_eq(Base + 2, PL({priv_dir, cowboy, [<<"a">>, <<"b">>]})), - ?_eq("a/b", P("a/b")) - ]. - - --endif. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket.erl deleted file mode 100644 index 5100213..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket.erl +++ /dev/null @@ -1,530 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc WebSocket protocol implementation. -%% -%% Supports the protocol version 0 (hixie-76), version 7 (hybi-7) -%% and version 8 (hybi-8, hybi-9 and hybi-10). -%% -%% Version 0 is supported by the following browsers: -%%
    -%%
  • Firefox 4-5 (disabled by default)
  • -%%
  • Chrome 6-13
  • -%%
  • Safari 5.0.1+
  • -%%
  • Opera 11.00+ (disabled by default)
  • -%%
-%% -%% Version 7 is supported by the following browser: -%%
    -%%
  • Firefox 6
  • -%%
-%% -%% Version 8+ is supported by the following browsers: -%%
    -%%
  • Firefox 7+
  • -%%
  • Chrome 14+
  • -%%
--module(cowboy_http_websocket). - --export([upgrade/4]). %% API. --export([handler_loop/4]). %% Internal. - --include("include/http.hrl"). --include_lib("eunit/include/eunit.hrl"). - --type opcode() :: 0 | 1 | 2 | 8 | 9 | 10. --type mask_key() :: 0..16#ffffffff. - --record(state, { - version :: 0 | 7 | 8 | 13, - handler :: module(), - opts :: any(), - challenge = undefined :: undefined | binary() | {binary(), binary()}, - timeout = infinity :: timeout(), - timeout_ref = undefined, - messages = undefined :: undefined | {atom(), atom(), atom()}, - hibernate = false, - eop :: undefined | tuple(), %% hixie-76 specific. - origin = undefined :: undefined | binary() %% hixie-76 specific. -}). - -%% @doc Upgrade a HTTP request to the WebSocket protocol. -%% -%% You do not need to call this function manually. To upgrade to the WebSocket -%% protocol, you simply need to return {upgrade, protocol, {@module}} -%% in your cowboy_http_handler:init/3 handler function. --spec upgrade(pid(), module(), any(), #http_req{}) -> closed | none(). -upgrade(ListenerPid, Handler, Opts, Req) -> - cowboy_listener:move_connection(ListenerPid, websocket, self()), - case catch websocket_upgrade(#state{handler=Handler, opts=Opts}, Req) of - {ok, State, Req2} -> handler_init(State, Req2); - {'EXIT', _Reason} -> upgrade_error(Req) - end. - --spec websocket_upgrade(#state{}, #http_req{}) -> {ok, #state{}, #http_req{}}. -websocket_upgrade(State, Req) -> - {ConnTokens, Req2} - = cowboy_http_req:parse_header('Connection', Req), - true = lists:member(<<"upgrade">>, ConnTokens), - %% @todo Should probably send a 426 if the Upgrade header is missing. - {[<<"websocket">>], Req3} = cowboy_http_req:parse_header('Upgrade', Req2), - {Version, Req4} = cowboy_http_req:header(<<"Sec-Websocket-Version">>, Req3), - websocket_upgrade(Version, State, Req4). - -%% @todo Handle the Sec-Websocket-Protocol header. -%% @todo Reply a proper error, don't die, if a required header is undefined. 
--spec websocket_upgrade(undefined | <<_:8>>, #state{}, #http_req{}) - -> {ok, #state{}, #http_req{}}. -%% No version given. Assuming hixie-76 draft. -%% -%% We need to wait to send a reply back before trying to read the -%% third part of the challenge key, because proxies will wait for -%% a reply before sending it. Therefore we calculate the challenge -%% key only in websocket_handshake/3. -websocket_upgrade(undefined, State, Req=#http_req{meta=Meta}) -> - {Origin, Req2} = cowboy_http_req:header(<<"Origin">>, Req), - {Key1, Req3} = cowboy_http_req:header(<<"Sec-Websocket-Key1">>, Req2), - {Key2, Req4} = cowboy_http_req:header(<<"Sec-Websocket-Key2">>, Req3), - false = lists:member(undefined, [Origin, Key1, Key2]), - EOP = binary:compile_pattern(<< 255 >>), - {ok, State#state{version=0, origin=Origin, challenge={Key1, Key2}, - eop=EOP}, Req4#http_req{meta=[{websocket_version, 0}|Meta]}}; -%% Versions 7 and 8. Implementation follows the hybi 7 through 17 drafts. -websocket_upgrade(Version, State, Req=#http_req{meta=Meta}) - when Version =:= <<"7">>; Version =:= <<"8">>; - Version =:= <<"13">> -> - {Key, Req2} = cowboy_http_req:header(<<"Sec-Websocket-Key">>, Req), - false = Key =:= undefined, - Challenge = hybi_challenge(Key), - IntVersion = list_to_integer(binary_to_list(Version)), - {ok, State#state{version=IntVersion, challenge=Challenge}, - Req2#http_req{meta=[{websocket_version, IntVersion}|Meta]}}. - --spec handler_init(#state{}, #http_req{}) -> closed | none(). 
-handler_init(State=#state{handler=Handler, opts=Opts}, - Req=#http_req{transport=Transport}) -> - try Handler:websocket_init(Transport:name(), Req, Opts) of - {ok, Req2, HandlerState} -> - websocket_handshake(State, Req2, HandlerState); - {ok, Req2, HandlerState, hibernate} -> - websocket_handshake(State#state{hibernate=true}, - Req2, HandlerState); - {ok, Req2, HandlerState, Timeout} -> - websocket_handshake(State#state{timeout=Timeout}, - Req2, HandlerState); - {ok, Req2, HandlerState, Timeout, hibernate} -> - websocket_handshake(State#state{timeout=Timeout, - hibernate=true}, Req2, HandlerState); - {shutdown, Req2} -> - upgrade_denied(Req2) - catch Class:Reason -> - upgrade_error(Req), - error_logger:error_msg( - "** Handler ~p terminating in websocket_init/3~n" - " for the reason ~p:~p~n** Options were ~p~n" - "** Request was ~p~n** Stacktrace: ~p~n~n", - [Handler, Class, Reason, Opts, Req, erlang:get_stacktrace()]) - end. - --spec upgrade_error(#http_req{}) -> closed. -upgrade_error(Req) -> - {ok, _Req2} = cowboy_http_req:reply(400, [], [], - Req#http_req{resp_state=waiting}), - closed. - -%% @see cowboy_http_protocol:ensure_response/1 --spec upgrade_denied(#http_req{}) -> closed. -upgrade_denied(#http_req{resp_state=done}) -> - closed; -upgrade_denied(Req=#http_req{resp_state=waiting}) -> - {ok, _Req2} = cowboy_http_req:reply(400, [], [], Req), - closed; -upgrade_denied(#http_req{method='HEAD', resp_state=chunks}) -> - closed; -upgrade_denied(#http_req{socket=Socket, transport=Transport, - resp_state=chunks}) -> - Transport:send(Socket, <<"0\r\n\r\n">>), - closed. - --spec websocket_handshake(#state{}, #http_req{}, any()) -> closed | none(). 
-websocket_handshake(State=#state{version=0, origin=Origin, - challenge={Key1, Key2}}, Req=#http_req{socket=Socket, - transport=Transport, raw_host=Host, port=Port, - raw_path=Path, raw_qs=QS}, HandlerState) -> - Location = hixie76_location(Transport:name(), Host, Port, Path, QS), - {ok, Req2} = cowboy_http_req:upgrade_reply( - <<"101 WebSocket Protocol Handshake">>, - [{<<"Upgrade">>, <<"WebSocket">>}, - {<<"Sec-Websocket-Location">>, Location}, - {<<"Sec-Websocket-Origin">>, Origin}], - Req#http_req{resp_state=waiting}), - %% Flush the resp_sent message before moving on. - receive {cowboy_http_req, resp_sent} -> ok after 0 -> ok end, - %% We replied with a proper response. Proxies should be happy enough, - %% we can now read the 8 last bytes of the challenge keys and send - %% the challenge response directly to the socket. - case cowboy_http_req:body(8, Req2) of - {ok, Key3, Req3} -> - Challenge = hixie76_challenge(Key1, Key2, Key3), - Transport:send(Socket, Challenge), - handler_before_loop(State#state{messages=Transport:messages()}, - Req3, HandlerState, <<>>); - _Any -> - closed %% If an error happened reading the body, stop there. - end; -websocket_handshake(State=#state{challenge=Challenge}, - Req=#http_req{transport=Transport}, HandlerState) -> - {ok, Req2} = cowboy_http_req:upgrade_reply( - 101, - [{<<"Upgrade">>, <<"websocket">>}, - {<<"Sec-Websocket-Accept">>, Challenge}], - Req#http_req{resp_state=waiting}), - %% Flush the resp_sent message before moving on. - receive {cowboy_http_req, resp_sent} -> ok after 0 -> ok end, - handler_before_loop(State#state{messages=Transport:messages()}, - Req2, HandlerState, <<>>). - --spec handler_before_loop(#state{}, #http_req{}, any(), binary()) -> closed | none(). 
-handler_before_loop(State=#state{hibernate=true}, - Req=#http_req{socket=Socket, transport=Transport}, - HandlerState, SoFar) -> - Transport:setopts(Socket, [{active, once}]), - State2 = handler_loop_timeout(State), - erlang:hibernate(?MODULE, handler_loop, [State2#state{hibernate=false}, - Req, HandlerState, SoFar]); -handler_before_loop(State, Req=#http_req{socket=Socket, transport=Transport}, - HandlerState, SoFar) -> - Transport:setopts(Socket, [{active, once}]), - State2 = handler_loop_timeout(State), - handler_loop(State2, Req, HandlerState, SoFar). - --spec handler_loop_timeout(#state{}) -> #state{}. -handler_loop_timeout(State=#state{timeout=infinity}) -> - State#state{timeout_ref=undefined}; -handler_loop_timeout(State=#state{timeout=Timeout, timeout_ref=PrevRef}) -> - _ = case PrevRef of undefined -> ignore; PrevRef -> - erlang:cancel_timer(PrevRef) end, - TRef = make_ref(), - erlang:send_after(Timeout, self(), {?MODULE, timeout, TRef}), - State#state{timeout_ref=TRef}. - -%% @private --spec handler_loop(#state{}, #http_req{}, any(), binary()) -> closed | none(). -handler_loop(State=#state{messages={OK, Closed, Error}, timeout_ref=TRef}, - Req=#http_req{socket=Socket}, HandlerState, SoFar) -> - receive - {OK, Socket, Data} -> - websocket_data(State, Req, HandlerState, - << SoFar/binary, Data/binary >>); - {Closed, Socket} -> - handler_terminate(State, Req, HandlerState, {error, closed}); - {Error, Socket, Reason} -> - handler_terminate(State, Req, HandlerState, {error, Reason}); - {?MODULE, timeout, TRef} -> - websocket_close(State, Req, HandlerState, {normal, timeout}); - {?MODULE, timeout, OlderTRef} when is_reference(OlderTRef) -> - handler_loop(State, Req, HandlerState, SoFar); - Message -> - handler_call(State, Req, HandlerState, - SoFar, websocket_info, Message, fun handler_before_loop/4) - end. - --spec websocket_data(#state{}, #http_req{}, any(), binary()) -> closed | none(). -%% No more data. 
-websocket_data(State, Req, HandlerState, <<>>) -> - handler_before_loop(State, Req, HandlerState, <<>>); -%% hixie-76 close frame. -websocket_data(State=#state{version=0}, Req, HandlerState, - << 255, 0, _Rest/binary >>) -> - websocket_close(State, Req, HandlerState, {normal, closed}); -%% hixie-76 data frame. We only support the frame type 0, same as the specs. -websocket_data(State=#state{version=0, eop=EOP}, Req, HandlerState, - Data = << 0, _/binary >>) -> - case binary:match(Data, EOP) of - {Pos, 1} -> - Pos2 = Pos - 1, - << 0, Payload:Pos2/binary, 255, Rest/bits >> = Data, - handler_call(State, Req, HandlerState, - Rest, websocket_handle, {text, Payload}, fun websocket_data/4); - nomatch -> - %% @todo We probably should allow limiting frame length. - handler_before_loop(State, Req, HandlerState, Data) - end; -%% incomplete hybi data frame. -websocket_data(State=#state{version=Version}, Req, HandlerState, Data) - when Version =/= 0, byte_size(Data) =:= 1 -> - handler_before_loop(State, Req, HandlerState, Data); -%% hybi data frame. -%% @todo Handle Fin. 
-websocket_data(State=#state{version=Version}, Req, HandlerState, Data) - when Version =/= 0 -> - << 1:1, 0:3, Opcode:4, Mask:1, PayloadLen:7, Rest/bits >> = Data, - case {PayloadLen, Rest} of - {126, _} when Opcode >= 8 -> websocket_close( - State, Req, HandlerState, {error, protocol}); - {127, _} when Opcode >= 8 -> websocket_close( - State, Req, HandlerState, {error, protocol}); - {126, << L:16, R/bits >>} -> websocket_before_unmask( - State, Req, HandlerState, Data, R, Opcode, Mask, L); - {126, Rest} -> websocket_before_unmask( - State, Req, HandlerState, Data, Rest, Opcode, Mask, undefined); - {127, << 0:1, L:63, R/bits >>} -> websocket_before_unmask( - State, Req, HandlerState, Data, R, Opcode, Mask, L); - {127, Rest} -> websocket_before_unmask( - State, Req, HandlerState, Data, Rest, Opcode, Mask, undefined); - {PayloadLen, Rest} -> websocket_before_unmask( - State, Req, HandlerState, Data, Rest, Opcode, Mask, PayloadLen) - end; -%% Something was wrong with the frame. Close the connection. -websocket_data(State, Req, HandlerState, _Bad) -> - websocket_close(State, Req, HandlerState, {error, badframe}). - -%% hybi routing depending on whether unmasking is needed. --spec websocket_before_unmask(#state{}, #http_req{}, any(), binary(), - binary(), opcode(), 0 | 1, non_neg_integer() | undefined) - -> closed | none(). -websocket_before_unmask(State, Req, HandlerState, Data, - Rest, Opcode, Mask, PayloadLen) -> - case {Mask, PayloadLen} of - {0, 0} -> - websocket_dispatch(State, Req, HandlerState, Rest, Opcode, <<>>); - {1, N} when N + 4 > byte_size(Rest); N =:= undefined -> - %% @todo We probably should allow limiting frame length. - handler_before_loop(State, Req, HandlerState, Data); - {1, _N} -> - << MaskKey:32, Payload:PayloadLen/binary, Rest2/bits >> = Rest, - websocket_unmask(State, Req, HandlerState, Rest2, - Opcode, Payload, MaskKey) - end. - -%% hybi unmasking. 
--spec websocket_unmask(#state{}, #http_req{}, any(), binary(), - opcode(), binary(), mask_key()) -> closed | none(). -websocket_unmask(State, Req, HandlerState, RemainingData, - Opcode, Payload, MaskKey) -> - websocket_unmask(State, Req, HandlerState, RemainingData, - Opcode, Payload, MaskKey, <<>>). - --spec websocket_unmask(#state{}, #http_req{}, any(), binary(), - opcode(), binary(), mask_key(), binary()) -> closed | none(). -websocket_unmask(State, Req, HandlerState, RemainingData, - Opcode, << O:32, Rest/bits >>, MaskKey, Acc) -> - T = O bxor MaskKey, - websocket_unmask(State, Req, HandlerState, RemainingData, - Opcode, Rest, MaskKey, << Acc/binary, T:32 >>); -websocket_unmask(State, Req, HandlerState, RemainingData, - Opcode, << O:24 >>, MaskKey, Acc) -> - << MaskKey2:24, _:8 >> = << MaskKey:32 >>, - T = O bxor MaskKey2, - websocket_dispatch(State, Req, HandlerState, RemainingData, - Opcode, << Acc/binary, T:24 >>); -websocket_unmask(State, Req, HandlerState, RemainingData, - Opcode, << O:16 >>, MaskKey, Acc) -> - << MaskKey2:16, _:16 >> = << MaskKey:32 >>, - T = O bxor MaskKey2, - websocket_dispatch(State, Req, HandlerState, RemainingData, - Opcode, << Acc/binary, T:16 >>); -websocket_unmask(State, Req, HandlerState, RemainingData, - Opcode, << O:8 >>, MaskKey, Acc) -> - << MaskKey2:8, _:24 >> = << MaskKey:32 >>, - T = O bxor MaskKey2, - websocket_dispatch(State, Req, HandlerState, RemainingData, - Opcode, << Acc/binary, T:8 >>); -websocket_unmask(State, Req, HandlerState, RemainingData, - Opcode, <<>>, _MaskKey, Acc) -> - websocket_dispatch(State, Req, HandlerState, RemainingData, - Opcode, Acc). - -%% hybi dispatching. --spec websocket_dispatch(#state{}, #http_req{}, any(), binary(), - opcode(), binary()) -> closed | none(). -%% @todo Fragmentation. -%~ websocket_dispatch(State, Req, HandlerState, RemainingData, 0, Payload) -> -%% Text frame. 
-websocket_dispatch(State, Req, HandlerState, RemainingData, 1, Payload) -> - handler_call(State, Req, HandlerState, RemainingData, - websocket_handle, {text, Payload}, fun websocket_data/4); -%% Binary frame. -websocket_dispatch(State, Req, HandlerState, RemainingData, 2, Payload) -> - handler_call(State, Req, HandlerState, RemainingData, - websocket_handle, {binary, Payload}, fun websocket_data/4); -%% Close control frame. -%% @todo Handle the optional Payload. -websocket_dispatch(State, Req, HandlerState, _RemainingData, 8, _Payload) -> - websocket_close(State, Req, HandlerState, {normal, closed}); -%% Ping control frame. Send a pong back and forward the ping to the handler. -websocket_dispatch(State, Req=#http_req{socket=Socket, transport=Transport}, - HandlerState, RemainingData, 9, Payload) -> - Len = hybi_payload_length(byte_size(Payload)), - Transport:send(Socket, << 1:1, 0:3, 10:4, 0:1, Len/bits, Payload/binary >>), - handler_call(State, Req, HandlerState, RemainingData, - websocket_handle, {ping, Payload}, fun websocket_data/4); -%% Pong control frame. -websocket_dispatch(State, Req, HandlerState, RemainingData, 10, Payload) -> - handler_call(State, Req, HandlerState, RemainingData, - websocket_handle, {pong, Payload}, fun websocket_data/4). - --spec handler_call(#state{}, #http_req{}, any(), binary(), - atom(), any(), fun()) -> closed | none(). 
-handler_call(State=#state{handler=Handler, opts=Opts}, Req, HandlerState, - RemainingData, Callback, Message, NextState) -> - try Handler:Callback(Message, Req, HandlerState) of - {ok, Req2, HandlerState2} -> - NextState(State, Req2, HandlerState2, RemainingData); - {ok, Req2, HandlerState2, hibernate} -> - NextState(State#state{hibernate=true}, - Req2, HandlerState2, RemainingData); - {reply, Payload, Req2, HandlerState2} -> - websocket_send(Payload, State, Req2), - NextState(State, Req2, HandlerState2, RemainingData); - {reply, Payload, Req2, HandlerState2, hibernate} -> - websocket_send(Payload, State, Req2), - NextState(State#state{hibernate=true}, - Req2, HandlerState2, RemainingData); - {shutdown, Req2, HandlerState2} -> - websocket_close(State, Req2, HandlerState2, {normal, shutdown}) - catch Class:Reason -> - error_logger:error_msg( - "** Handler ~p terminating in ~p/3~n" - " for the reason ~p:~p~n** Message was ~p~n" - "** Options were ~p~n** Handler state was ~p~n" - "** Request was ~p~n** Stacktrace: ~p~n~n", - [Handler, Callback, Class, Reason, Message, Opts, - HandlerState, Req, erlang:get_stacktrace()]), - websocket_close(State, Req, HandlerState, {error, handler}) - end. - --spec websocket_send(binary(), #state{}, #http_req{}) -> closed | ignore. -%% hixie-76 text frame. -websocket_send({text, Payload}, #state{version=0}, - #http_req{socket=Socket, transport=Transport}) -> - Transport:send(Socket, [0, Payload, 255]); -%% Ignore all unknown frame types for compatibility with hixie 76. -websocket_send(_Any, #state{version=0}, _Req) -> - ignore; -websocket_send({Type, Payload}, _State, - #http_req{socket=Socket, transport=Transport}) -> - Opcode = case Type of - text -> 1; - binary -> 2; - ping -> 9; - pong -> 10 - end, - Len = hybi_payload_length(iolist_size(Payload)), - Transport:send(Socket, [<< 1:1, 0:3, Opcode:4, 0:1, Len/bits >>, - Payload]). - --spec websocket_close(#state{}, #http_req{}, any(), {atom(), atom()}) -> closed. 
-websocket_close(State=#state{version=0}, Req=#http_req{socket=Socket, - transport=Transport}, HandlerState, Reason) -> - Transport:send(Socket, << 255, 0 >>), - handler_terminate(State, Req, HandlerState, Reason); -%% @todo Send a Payload? Using Reason is usually good but we're quite careless. -websocket_close(State, Req=#http_req{socket=Socket, - transport=Transport}, HandlerState, Reason) -> - Transport:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>), - handler_terminate(State, Req, HandlerState, Reason). - --spec handler_terminate(#state{}, #http_req{}, - any(), atom() | {atom(), atom()}) -> closed. -handler_terminate(#state{handler=Handler, opts=Opts}, - Req, HandlerState, TerminateReason) -> - try - Handler:websocket_terminate(TerminateReason, Req, HandlerState) - catch Class:Reason -> - error_logger:error_msg( - "** Handler ~p terminating in websocket_terminate/3~n" - " for the reason ~p:~p~n** Initial reason was ~p~n" - "** Options were ~p~n** Handler state was ~p~n" - "** Request was ~p~n** Stacktrace: ~p~n~n", - [Handler, Class, Reason, TerminateReason, Opts, - HandlerState, Req, erlang:get_stacktrace()]) - end, - closed. - -%% hixie-76 specific. - --spec hixie76_challenge(binary(), binary(), binary()) -> binary(). -hixie76_challenge(Key1, Key2, Key3) -> - IntKey1 = hixie76_key_to_integer(Key1), - IntKey2 = hixie76_key_to_integer(Key2), - erlang:md5(<< IntKey1:32, IntKey2:32, Key3/binary >>). - --spec hixie76_key_to_integer(binary()) -> integer(). -hixie76_key_to_integer(Key) -> - Number = list_to_integer([C || << C >> <= Key, C >= $0, C =< $9]), - Spaces = length([C || << C >> <= Key, C =:= 32]), - Number div Spaces. - --spec hixie76_location(atom(), binary(), inet:ip_port(), binary(), binary()) - -> binary(). 
-hixie76_location(Protocol, Host, Port, Path, <<>>) -> - << (hixie76_location_protocol(Protocol))/binary, "://", Host/binary, - (hixie76_location_port(Protocol, Port))/binary, Path/binary>>; -hixie76_location(Protocol, Host, Port, Path, QS) -> - << (hixie76_location_protocol(Protocol))/binary, "://", Host/binary, - (hixie76_location_port(Protocol, Port))/binary, Path/binary, "?", QS/binary >>. - --spec hixie76_location_protocol(atom()) -> binary(). -hixie76_location_protocol(ssl) -> <<"wss">>; -hixie76_location_protocol(_) -> <<"ws">>. - -%% @todo We should add a secure/0 function to transports -%% instead of relying on their name. --spec hixie76_location_port(atom(), inet:ip_port()) -> binary(). -hixie76_location_port(ssl, 443) -> - <<>>; -hixie76_location_port(tcp, 80) -> - <<>>; -hixie76_location_port(_, Port) -> - <<":", (list_to_binary(integer_to_list(Port)))/binary>>. - -%% hybi specific. - --spec hybi_challenge(binary()) -> binary(). -hybi_challenge(Key) -> - Bin = << Key/binary, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" >>, - base64:encode(crypto:sha(Bin)). - --spec hybi_payload_length(0..16#7fffffffffffffff) - -> << _:7 >> | << _:23 >> | << _:71 >>. -hybi_payload_length(N) -> - case N of - N when N =< 125 -> << N:7 >>; - N when N =< 16#ffff -> << 126:7, N:16 >>; - N when N =< 16#7fffffffffffffff -> << 127:7, N:64 >> - end. - -%% Tests. - --ifdef(TEST). 
- -hixie76_location_test() -> - ?assertEqual(<<"ws://localhost/path">>, - hixie76_location(tcp, <<"localhost">>, 80, <<"/path">>, <<>>)), - ?assertEqual(<<"ws://localhost:443/path">>, - hixie76_location(tcp, <<"localhost">>, 443, <<"/path">>, <<>>)), - ?assertEqual(<<"ws://localhost:8080/path">>, - hixie76_location(tcp, <<"localhost">>, 8080, <<"/path">>, <<>>)), - ?assertEqual(<<"ws://localhost:8080/path?dummy=2785">>, - hixie76_location(tcp, <<"localhost">>, 8080, <<"/path">>, <<"dummy=2785">>)), - ?assertEqual(<<"wss://localhost/path">>, - hixie76_location(ssl, <<"localhost">>, 443, <<"/path">>, <<>>)), - ?assertEqual(<<"wss://localhost:8443/path">>, - hixie76_location(ssl, <<"localhost">>, 8443, <<"/path">>, <<>>)), - ?assertEqual(<<"wss://localhost:8443/path?dummy=2785">>, - hixie76_location(ssl, <<"localhost">>, 8443, <<"/path">>, <<"dummy=2785">>)), - ok. - --endif. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket_handler.erl deleted file mode 100644 index 2ea0a46..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_http_websocket_handler.erl +++ /dev/null @@ -1,60 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc Handler for HTTP WebSocket requests. -%% -%% WebSocket handlers must implement four callbacks: websocket_init/3, -%% websocket_handle/3, websocket_info/3 and -%% websocket_terminate/3. These callbacks will only be called if the -%% connection is upgraded to WebSocket in the HTTP handler's init/3 -%% callback. They are then called in that order, although -%% websocket_handle/3 will be called for each packet received, -%% and websocket_info for each message received. -%% -%% websocket_init/3 is meant for initialization. It receives -%% information about the transport and protocol used, along with the handler -%% options from the dispatch list. You can define a request-wide state here. -%% If you are going to want to compact the request, you should probably do it -%% here. -%% -%% websocket_handle/3 receives the data from the socket. It can reply -%% something, do nothing or close the connection. -%% -%% websocket_info/3 receives messages sent to the process. It has -%% the same reply format as websocket_handle/3 described above. Note -%% that unlike in a gen_server, when websocket_info/3 -%% replies something, it is always to the socket, not to the process that -%% originated the message. -%% -%% websocket_terminate/3 is meant for cleaning up. It also receives -%% the request and the state previously defined, along with a reason for -%% termination. -%% -%% All of websocket_init/3, websocket_handle/3 and -%% websocket_info/3 can decide to hibernate the process by adding -%% an extra element to the returned tuple, containing the atom -%% hibernate. Doing so helps save memory and improve CPU usage. 
--module(cowboy_http_websocket_handler). - --export([behaviour_info/1]). - -%% @private --spec behaviour_info(_) - -> undefined | [{websocket_handle, 3} | {websocket_info, 3} - | {websocket_init, 3} | {websocket_terminate, 3}, ...]. -behaviour_info(callbacks) -> - [{websocket_init, 3}, {websocket_handle, 3}, - {websocket_info, 3}, {websocket_terminate, 3}]; -behaviour_info(_Other) -> - undefined. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener.erl deleted file mode 100644 index 4565b31..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener.erl +++ /dev/null @@ -1,174 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc Public API for managing listeners. --module(cowboy_listener). --behaviour(gen_server). - --export([start_link/0, stop/1, - add_connection/3, move_connection/3, remove_connection/2, wait/3]). %% API. --export([init/1, handle_call/3, handle_cast/2, - handle_info/2, terminate/2, code_change/3]). %% gen_server. - --record(state, { - req_pools = [] :: [{atom(), non_neg_integer()}], - reqs_table, - queue = [] -}). - -%% API. 
- -%% @private -%% -%% We set the process priority to high because cowboy_listener is the central -%% gen_server in Cowboy and is used to manage all the incoming connections. -%% Setting the process priority to high ensures the connection-related code -%% will always be executed when a connection needs it, allowing Cowboy to -%% scale far beyond what it would with a normal priority. --spec start_link() -> {ok, pid()}. -start_link() -> - gen_server:start_link(?MODULE, [], [{spawn_opt, [{priority, high}]}]). - -%% @private --spec stop(pid()) -> stopped. -stop(ServerPid) -> - gen_server:call(ServerPid, stop). - -%% @doc Add a connection to the given pool in the listener. -%% -%% Pools of connections are used to restrict the maximum number of connections -%% depending on their type. By default, Cowboy add all connections to the -%% pool default. It also checks for the maximum number of connections -%% in that pool before accepting again. -%% -%% When a process managing a connection dies, the process is removed from the -%% pool. If the socket has been sent to another process, it is up to the -%% protocol code to inform the listener of the new ConnPid by removing -%% the previous and adding the new one. --spec add_connection(pid(), atom(), pid()) -> {ok, non_neg_integer()}. -add_connection(ServerPid, Pool, ConnPid) -> - gen_server:call(ServerPid, {add_connection, Pool, ConnPid}). - -%% @doc Move a connection from one pool to another. --spec move_connection(pid(), atom(), pid()) -> ok. -move_connection(ServerPid, DestPool, ConnPid) -> - gen_server:cast(ServerPid, {move_connection, DestPool, ConnPid}). - -%% @doc Remove the given connection from its pool. --spec remove_connection(pid(), pid()) -> ok. -remove_connection(ServerPid, ConnPid) -> - gen_server:cast(ServerPid, {remove_connection, ConnPid}). - -%% @doc Wait until the number of connections in the given pool gets below -%% the given threshold. 
-%% -%% This function will not return until the number of connections in the pool -%% gets below MaxConns. It makes use of gen_server:reply/2 -%% to make the process wait for a reply indefinitely. --spec wait(pid(), atom(), non_neg_integer()) -> ok. -wait(ServerPid, Pool, MaxConns) -> - gen_server:call(ServerPid, {wait, Pool, MaxConns}, infinity). - -%% gen_server. - -%% @private --spec init([]) -> {ok, #state{}}. -init([]) -> - ReqsTablePid = ets:new(requests_table, [set, private]), - {ok, #state{reqs_table=ReqsTablePid}}. - -%% @private --spec handle_call(_, _, State) - -> {reply, ignored, State} | {stop, normal, stopped, State}. -handle_call({add_connection, Pool, ConnPid}, _From, State=#state{ - req_pools=Pools, reqs_table=ReqsTable}) -> - MonitorRef = erlang:monitor(process, ConnPid), - {NbConnsRet, Pools2} = case lists:keyfind(Pool, 1, Pools) of - false -> - {1, [{Pool, 1}|Pools]}; - {Pool, NbConns} -> - NbConns2 = NbConns + 1, - {NbConns2, [{Pool, NbConns2}|lists:keydelete(Pool, 1, Pools)]} - end, - ets:insert(ReqsTable, {ConnPid, {MonitorRef, Pool}}), - {reply, {ok, NbConnsRet}, State#state{req_pools=Pools2}}; -handle_call({wait, Pool, MaxConns}, From, State=#state{ - req_pools=Pools, queue=Queue}) -> - case lists:keyfind(Pool, 1, Pools) of - {Pool, NbConns} when NbConns > MaxConns -> - {noreply, State#state{queue=[From|Queue]}}; - _Any -> - {reply, ok, State} - end; -handle_call(stop, _From, State) -> - {stop, normal, stopped, State}; -handle_call(_Request, _From, State) -> - {reply, ignored, State}. - -%% @private --spec handle_cast(_, State) -> {noreply, State}. 
-handle_cast({move_connection, DestPool, ConnPid}, State=#state{ - req_pools=Pools, reqs_table=ReqsTable}) -> - {MonitorRef, SrcPool} = ets:lookup_element(ReqsTable, ConnPid, 2), - ets:insert(ReqsTable, {ConnPid, {MonitorRef, DestPool}}), - {SrcPool, SrcNbConns} = lists:keyfind(SrcPool, 1, Pools), - DestNbConns = case lists:keyfind(DestPool, 1, Pools) of - false -> 1; - {DestPool, NbConns} -> NbConns + 1 - end, - Pools2 = lists:keydelete(SrcPool, 1, lists:keydelete(DestPool, 1, Pools)), - Pools3 = [{SrcPool, SrcNbConns - 1}, {DestPool, DestNbConns}|Pools2], - {noreply, State#state{req_pools=Pools3}}; -handle_cast({remove_connection, ConnPid}, State) -> - State2 = remove_pid(ConnPid, State), - {noreply, State2}; -handle_cast(_Msg, State) -> - {noreply, State}. - -%% @private --spec handle_info(_, State) -> {noreply, State}. -handle_info({'DOWN', _Ref, process, Pid, _Info}, State) -> - State2 = remove_pid(Pid, State), - {noreply, State2}; -handle_info(_Info, State) -> - {noreply, State}. - -%% @private --spec terminate(_, _) -> ok. -terminate(_Reason, _State) -> - ok. - -%% @private --spec code_change(_, State, _) -> {ok, State}. -code_change(_OldVsn, State, _Extra) -> - {ok, State}. - -%% Internal. - -%% @private --spec remove_pid(pid(), State) -> State. -remove_pid(Pid, State=#state{ - req_pools=Pools, reqs_table=ReqsTable, queue=Queue}) -> - {MonitorRef, Pool} = ets:lookup_element(ReqsTable, Pid, 2), - erlang:demonitor(MonitorRef, [flush]), - {Pool, NbConns} = lists:keyfind(Pool, 1, Pools), - Pools2 = [{Pool, NbConns - 1}|lists:keydelete(Pool, 1, Pools)], - ets:delete(ReqsTable, Pid), - case Queue of - [] -> - State#state{req_pools=Pools2}; - [Client|Queue2] -> - gen_server:reply(Client, ok), - State#state{req_pools=Pools2, queue=Queue2} - end. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener_sup.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener_sup.erl deleted file mode 100644 index aca2b0b..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_listener_sup.erl +++ /dev/null @@ -1,45 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @private --module(cowboy_listener_sup). --behaviour(supervisor). - --export([start_link/5]). %% API. --export([init/1]). %% supervisor. - -%% API. - --spec start_link(non_neg_integer(), module(), any(), module(), any()) - -> {ok, pid()}. 
-start_link(NbAcceptors, Transport, TransOpts, Protocol, ProtoOpts) -> - {ok, SupPid} = supervisor:start_link(?MODULE, []), - {ok, ListenerPid} = supervisor:start_child(SupPid, - {cowboy_listener, {cowboy_listener, start_link, []}, - permanent, 5000, worker, [cowboy_listener]}), - {ok, ReqsPid} = supervisor:start_child(SupPid, - {cowboy_requests_sup, {cowboy_requests_sup, start_link, []}, - permanent, 5000, supervisor, [cowboy_requests_sup]}), - {ok, _PoolPid} = supervisor:start_child(SupPid, - {cowboy_acceptors_sup, {cowboy_acceptors_sup, start_link, [ - NbAcceptors, Transport, TransOpts, - Protocol, ProtoOpts, ListenerPid, ReqsPid - ]}, permanent, 5000, supervisor, [cowboy_acceptors_sup]}), - {ok, SupPid}. - -%% supervisor. - --spec init([]) -> {ok, {{one_for_all, 10, 10}, []}}. -init([]) -> - {ok, {{one_for_all, 10, 10}, []}}. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_multipart.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_multipart.erl deleted file mode 100644 index 0bd123a..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_multipart.erl +++ /dev/null @@ -1,249 +0,0 @@ -%% Copyright (c) 2011, Anthony Ramine -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc Multipart parser. 
--module(cowboy_multipart). - --type part_parser() :: any(). --type parser(T) :: fun((binary()) -> T). --type more(T) :: T | {more, parser(T)}. --type part_result() :: any(). --type headers() :: any(). --type http_headers() :: [{atom() | binary(), binary()}]. --type body_cont() :: any(). --type cont(T) :: fun(() -> T). --type body_result() :: any(). --type end_of_part() :: {end_of_part, cont(more(part_result()))}. --type disposition() :: {binary(), [{binary(), binary()}]}. - --export([parser/1, content_disposition/1]). - --include_lib("eunit/include/eunit.hrl"). - -%% API. - -%% @doc Return a multipart parser for the given boundary. --spec parser(binary()) -> part_parser(). -parser(Boundary) when is_binary(Boundary) -> - fun (Bin) when is_binary(Bin) -> parse(Bin, Boundary) end. - -%% @doc Parse a content disposition. -%% @todo Parse the MIME header instead of the HTTP one. --spec content_disposition(binary()) -> disposition(). -content_disposition(Data) -> - cowboy_http:token_ci(Data, - fun (_Rest, <<>>) -> {error, badarg}; - (Rest, Disposition) -> - cowboy_http:content_type_params(Rest, - fun (Params) -> {Disposition, Params} end, []) - end). - -%% Internal. - -%% @doc Entry point of the multipart parser, skips over the preamble if any. --spec parse(binary(), binary()) -> more(part_result()). -parse(Bin, Boundary) when byte_size(Bin) >= byte_size(Boundary) + 2 -> - BoundarySize = byte_size(Boundary), - Pattern = pattern(Boundary), - case Bin of - <<"--", Boundary:BoundarySize/binary, Rest/binary>> -> - % Data starts with initial boundary, skip preamble parsing. - parse_boundary_tail(Rest, Pattern); - _ -> - % Parse preamble. - skip(Bin, Pattern) - end; -parse(Bin, Boundary) -> - % Not enough data to know if the data begins with a boundary. - more(Bin, fun (NewBin) -> parse(NewBin, Boundary) end). - --type pattern() :: {binary:cp(), non_neg_integer()}. - -%% @doc Return a compiled binary pattern with its size in bytes. 
-%% The pattern is the boundary prepended with "\r\n--". --spec pattern(binary()) -> pattern(). -pattern(Boundary) -> - MatchPattern = <<"\r\n--", Boundary/binary>>, - {binary:compile_pattern(MatchPattern), byte_size(MatchPattern)}. - -%% @doc Parse remaining characters of a line beginning with the boundary. -%% If followed by "--", eof is returned and parsing is finished. --spec parse_boundary_tail(binary(), pattern()) -> more(part_result()). -parse_boundary_tail(Bin, Pattern) when byte_size(Bin) >= 2 -> - case Bin of - <<"--", _Rest/binary>> -> - % Boundary is followed by "--", end parsing. - eof; - _ -> - % No dash after boundary, proceed with unknown chars and lwsp - % removal. - parse_boundary_eol(Bin, Pattern) - end; -parse_boundary_tail(Bin, Pattern) -> - % Boundary may be followed by "--", need more data. - more(Bin, fun (NewBin) -> parse_boundary_tail(NewBin, Pattern) end). - -%% @doc Skip whitespace and unknown chars until CRLF. --spec parse_boundary_eol(binary(), pattern()) -> more(part_result()). -parse_boundary_eol(Bin, Pattern) -> - case binary:match(Bin, <<"\r\n">>) of - {CrlfStart, _Length} -> - % End of line found, remove optional whitespace. - <<_:CrlfStart/binary, Rest/binary>> = Bin, - Fun = fun (Rest2) -> parse_boundary_crlf(Rest2, Pattern) end, - cowboy_http:whitespace(Rest, Fun); - nomatch -> - % CRLF not found in the given binary. - RestStart = lists:max([byte_size(Bin) - 1, 0]), - <<_:RestStart/binary, Rest/binary>> = Bin, - more(Rest, fun (NewBin) -> parse_boundary_eol(NewBin, Pattern) end) - end. - --spec parse_boundary_crlf(binary(), pattern()) -> more(part_result()). -parse_boundary_crlf(<<"\r\n", Rest/binary>>, Pattern) -> - % The binary is at least 2 bytes long as this function is only called by - % parse_boundary_eol/3 when CRLF has been found so a more tuple will never - % be returned from here. 
- parse_headers(Rest, Pattern); -parse_boundary_crlf(Bin, Pattern) -> - % Unspecified behaviour here: RFC 2046 doesn't say what to do when LWSP is - % not followed directly by a new line. In this implementation it is - % considered part of the boundary so EOL needs to be searched again. - parse_boundary_eol(Bin, Pattern). - --spec parse_headers(binary(), pattern()) -> more(part_result()). -parse_headers(Bin, Pattern) -> - parse_headers(Bin, Pattern, []). - --spec parse_headers(binary(), pattern(), http_headers()) -> more(part_result()). -parse_headers(Bin, Pattern, Acc) -> - case erlang:decode_packet(httph_bin, Bin, []) of - {ok, {http_header, _, Name, _, Value}, Rest} -> - parse_headers(Rest, Pattern, [{Name, Value} | Acc]); - {ok, http_eoh, Rest} -> - Headers = lists:reverse(Acc), - {headers, Headers, fun () -> parse_body(Rest, Pattern) end}; - {ok, {http_error, _}, _} -> - % Skip malformed parts. - skip(Bin, Pattern); - {more, _} -> - more(Bin, fun (NewBin) -> parse_headers(NewBin, Pattern, Acc) end) - end. - --spec parse_body(binary(), pattern()) -> more(body_result()). -parse_body(Bin, Pattern = {P, PSize}) when byte_size(Bin) >= PSize -> - case binary:match(Bin, P) of - {0, _Length} -> - <<_:PSize/binary, Rest/binary>> = Bin, - end_of_part(Rest, Pattern); - {BoundaryStart, _Length} -> - % Boundary found, this is the latest partial body that will be - % returned for this part. - <> = Bin, - FResult = end_of_part(Rest, Pattern), - {body, PBody, fun () -> FResult end}; - nomatch -> - PartialLength = byte_size(Bin) - PSize + 1, - <> = Bin, - {body, PBody, fun () -> parse_body(Rest, Pattern) end} - end; -parse_body(Bin, Pattern) -> - more(Bin, fun (NewBin) -> parse_body(NewBin, Pattern) end). - --spec end_of_part(binary(), pattern()) -> end_of_part(). -end_of_part(Bin, Pattern) -> - {end_of_part, fun () -> parse_boundary_tail(Bin, Pattern) end}. - --spec skip(binary(), pattern()) -> more(part_result()). 
-skip(Bin, Pattern = {P, PSize}) -> - case binary:match(Bin, P) of - {BoundaryStart, _Length} -> - % Boundary found, proceed with parsing of the next part. - RestStart = BoundaryStart + PSize, - <<_:RestStart/binary, Rest/binary>> = Bin, - parse_boundary_tail(Rest, Pattern); - nomatch -> - % Boundary not found, need more data. - RestStart = lists:max([byte_size(Bin) - PSize + 1, 0]), - <<_:RestStart/binary, Rest/binary>> = Bin, - more(Rest, fun (NewBin) -> skip(NewBin, Pattern) end) - end. - --spec more(binary(), parser(T)) -> {more, parser(T)}. -more(<<>>, F) -> - {more, F}; -more(Bin, InnerF) -> - F = fun (NewData) when is_binary(NewData) -> - InnerF(<>) - end, - {more, F}. - -%% Tests. - --ifdef(TEST). - -multipart_test_() -> - %% {Body, Result} - Tests = [ - {<<"--boundary--">>, []}, - {<<"preamble\r\n--boundary--">>, []}, - {<<"--boundary--\r\nepilogue">>, []}, - {<<"\r\n--boundary\r\nA:b\r\nC:d\r\n\r\n\r\n--boundary--">>, - [{[{<<"A">>, <<"b">>}, {<<"C">>, <<"d">>}], <<>>}]}, - { - << - "--boundary\r\nX-Name:answer\r\n\r\n42" - "\r\n--boundary\r\nServer:Cowboy\r\n\r\nIt rocks!\r\n" - "\r\n--boundary--" - >>, - [ - {[{<<"X-Name">>, <<"answer">>}], <<"42">>}, - {[{'Server', <<"Cowboy">>}], <<"It rocks!\r\n">>} - ] - } - ], - [{title(V), fun () -> R = acc_multipart(V) end} || {V, R} <- Tests]. - -acc_multipart(V) -> - acc_multipart((parser(<<"boundary">>))(V), []). - -acc_multipart({headers, Headers, Cont}, Acc) -> - acc_multipart(Cont(), [{Headers, []}|Acc]); -acc_multipart({body, Body, Cont}, [{Headers, BodyAcc}|Acc]) -> - acc_multipart(Cont(), [{Headers, [Body|BodyAcc]}|Acc]); -acc_multipart({end_of_part, Cont}, [{Headers, BodyAcc}|Acc]) -> - Body = list_to_binary(lists:reverse(BodyAcc)), - acc_multipart(Cont(), [{Headers, Body}|Acc]); -acc_multipart(eof, Acc) -> - lists:reverse(Acc). 
- -content_disposition_test_() -> - %% {Disposition, Result} - Tests = [ - {<<"form-data; name=id">>, {<<"form-data">>, [{<<"name">>, <<"id">>}]}}, - {<<"inline">>, {<<"inline">>, []}}, - {<<"attachment; \tfilename=brackets-slides.pdf">>, - {<<"attachment">>, [{<<"filename">>, <<"brackets-slides.pdf">>}]}} - ], - [{title(V), fun () -> R = content_disposition(V) end} || {V, R} <- Tests]. - -title(Bin) -> - Title = lists:foldl( - fun ({T, R}, V) -> re:replace(V, T, R, [global]) end, - Bin, - [{"\t", "\\\\t"}, {"\r", "\\\\r"}, {"\n", "\\\\n"}] - ), - iolist_to_binary(Title). - --endif. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_protocol.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_protocol.erl deleted file mode 100644 index 34bb1a1..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_protocol.erl +++ /dev/null @@ -1,61 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% Copyright (c) 2011, Michiel Hakvoort -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc Cowboy protocol. -%% -%% A Cowboy protocol must implement one callback: start_link/4. -%% -%% start_link/4 is meant for the initialization of the -%% protocol process. 
-%% It receives the pid to the listener's gen_server, the client socket, -%% the module name of the chosen transport and the options defined when -%% starting the listener. The start_link/4 function must follow -%% the supervisor start function specification. -%% -%% After initializing your protocol, it is recommended to call the -%% function cowboy:accept_ack/1 with the ListenerPid as argument, -%% as it will ensure Cowboy has been able to fully initialize the socket. -%% Anything you do past this point is up to you! -%% -%% If you need to change some socket options, like enabling raw mode -%% for example, you can call the Transport:setopts/2 function. -%% It is the protocol's responsability to manage the socket usage, -%% there should be no need for an user to specify that kind of options -%% while starting a listener. -%% -%% You should definitely look at the cowboy_http_protocol module for -%% a great example of fast request handling if you need to. -%% Otherwise it's probably safe to use {active, once} mode -%% and handle everything as it comes. -%% -%% Note that while you technically can run a protocol handler directly -%% as a gen_server or a gen_fsm, it's probably not a good idea, -%% as the only call you'll ever receive from Cowboy is the -%% start_link/4 call. On the other hand, feel free to write -%% a very basic protocol handler which then forwards requests to a -%% gen_server or gen_fsm. By doing so however you must take care to -%% supervise their processes as Cowboy only knows about the protocol -%% handler itself. --module(cowboy_protocol). - --export([behaviour_info/1]). - -%% @private --spec behaviour_info(_) - -> undefined | [{start_link, 4}, ...]. -behaviour_info(callbacks) -> - [{start_link, 4}]; -behaviour_info(_Other) -> - undefined. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_ssl_transport.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_ssl_transport.erl deleted file mode 100644 index bf8b1fb..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_ssl_transport.erl +++ /dev/null @@ -1,164 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc SSL transport API. -%% -%% Wrapper around ssl implementing the Cowboy transport API. -%% -%% This transport requires the crypto, public_key -%% and ssl applications to be started. If they aren't started, -%% it will try to start them itself before opening a port to listen. -%% Applications aren't stopped when the listening socket is closed, though. -%% -%% @see ssl --module(cowboy_ssl_transport). --export([name/0, messages/0, listen/1, accept/2, recv/3, send/2, setopts/2, - controlling_process/2, peername/1, close/1]). - -%% @doc Name of this transport API, ssl. --spec name() -> ssl. -name() -> ssl. - -%% @doc Atoms used in the process messages sent by this API. -%% -%% They identify incoming data, closed connection and errors when receiving -%% data in active mode. --spec messages() -> {ssl, ssl_closed, ssl_error}. 
-messages() -> {ssl, ssl_closed, ssl_error}. - -%% @doc Setup a socket to listen on the given port on the local host. -%% -%% The available options are: -%%
-%%
port
Mandatory. TCP port number to open.
-%%
backlog
Maximum length of the pending connections queue. -%% Defaults to 1024.
-%%
ip
Interface to listen on. Listen on all interfaces -%% by default.
-%%
certfile
Mandatory. Path to a file containing the user's -%% certificate.
-%%
keyfile
Mandatory. Path to the file containing the user's -%% private PEM encoded key.
-%%
cacertfile
Optional. Path to file containing PEM encoded -%% CA certificates (trusted certificates used for verifying a peer -%% certificate).
-%%
password
Mandatory. String containing the user's password. -%% All private keyfiles must be password protected currently.
-%%
-%% -%% @see ssl:listen/2 -%% @todo The password option shouldn't be mandatory. --spec listen([{port, inet:ip_port()} | {certfile, string()} - | {keyfile, string()} | {password, string()} - | {cacertfile, string()} | {ip, inet:ip_address()}]) - -> {ok, ssl:sslsocket()} | {error, atom()}. -listen(Opts) -> - require([crypto, public_key, ssl]), - {port, Port} = lists:keyfind(port, 1, Opts), - Backlog = proplists:get_value(backlog, Opts, 1024), - {certfile, CertFile} = lists:keyfind(certfile, 1, Opts), - {keyfile, KeyFile} = lists:keyfind(keyfile, 1, Opts), - {password, Password} = lists:keyfind(password, 1, Opts), - ListenOpts0 = [binary, {active, false}, - {backlog, Backlog}, {packet, raw}, {reuseaddr, true}, - {certfile, CertFile}, {keyfile, KeyFile}, {password, Password}], - ListenOpts1 = - case lists:keyfind(ip, 1, Opts) of - false -> ListenOpts0; - Ip -> [Ip|ListenOpts0] - end, - ListenOpts = - case lists:keyfind(cacertfile, 1, Opts) of - false -> ListenOpts1; - CACertFile -> [CACertFile|ListenOpts1] - end, - ssl:listen(Port, ListenOpts). - -%% @doc Accept an incoming connection on a listen socket. -%% -%% Note that this function does both the transport accept and -%% the SSL handshake. -%% -%% @see ssl:transport_accept/2 -%% @see ssl:ssl_accept/2 --spec accept(ssl:sslsocket(), timeout()) - -> {ok, ssl:sslsocket()} | {error, closed | timeout | atom()}. -accept(LSocket, Timeout) -> - case ssl:transport_accept(LSocket, Timeout) of - {ok, CSocket} -> - ssl_accept(CSocket, Timeout); - {error, Reason} -> - {error, Reason} - end. - -%% @doc Receive a packet from a socket in passive mode. -%% @see ssl:recv/3 --spec recv(ssl:sslsocket(), non_neg_integer(), timeout()) - -> {ok, any()} | {error, closed | atom()}. -recv(Socket, Length, Timeout) -> - ssl:recv(Socket, Length, Timeout). - -%% @doc Send a packet on a socket. -%% @see ssl:send/2 --spec send(ssl:sslsocket(), iolist()) -> ok | {error, atom()}. -send(Socket, Packet) -> - ssl:send(Socket, Packet). 
- -%% @doc Set one or more options for a socket. -%% @see ssl:setopts/2 --spec setopts(ssl:sslsocket(), list()) -> ok | {error, atom()}. -setopts(Socket, Opts) -> - ssl:setopts(Socket, Opts). - -%% @doc Assign a new controlling process Pid to Socket. -%% @see ssl:controlling_process/2 --spec controlling_process(ssl:sslsocket(), pid()) - -> ok | {error, closed | not_owner | atom()}. -controlling_process(Socket, Pid) -> - ssl:controlling_process(Socket, Pid). - -%% @doc Return the address and port for the other end of a connection. -%% @see ssl:peername/1 --spec peername(ssl:sslsocket()) - -> {ok, {inet:ip_address(), inet:ip_port()}} | {error, atom()}. -peername(Socket) -> - ssl:peername(Socket). - -%% @doc Close a TCP socket. -%% @see ssl:close/1 --spec close(ssl:sslsocket()) -> ok. -close(Socket) -> - ssl:close(Socket). - -%% Internal. - --spec require(list(module())) -> ok. -require([]) -> - ok; -require([App|Tail]) -> - case application:start(App) of - ok -> ok; - {error, {already_started, App}} -> ok - end, - require(Tail). - --spec ssl_accept(ssl:sslsocket(), timeout()) - -> {ok, ssl:sslsocket()} | {error, closed | timeout | atom()}. -ssl_accept(Socket, Timeout) -> - case ssl:ssl_accept(Socket, Timeout) of - ok -> - {ok, Socket}; - {error, Reason} -> - {error, Reason} - end. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_tcp_transport.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_tcp_transport.erl deleted file mode 100644 index c1dad62..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/src/cowboy_tcp_transport.erl +++ /dev/null @@ -1,106 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. 
-%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -%% @doc TCP transport API. -%% -%% Wrapper around gen_tcp implementing the Cowboy transport API. -%% -%% @see gen_tcp --module(cowboy_tcp_transport). - --export([name/0, messages/0, listen/1, accept/2, recv/3, send/2, setopts/2, - controlling_process/2, peername/1, close/1]). - -%% @doc Name of this transport API, tcp. --spec name() -> tcp. -name() -> tcp. - -%% @doc Atoms used in the process messages sent by this API. -%% -%% They identify incoming data, closed connection and errors when receiving -%% data in active mode. --spec messages() -> {tcp, tcp_closed, tcp_error}. -messages() -> {tcp, tcp_closed, tcp_error}. - -%% @doc Setup a socket to listen on the given port on the local host. -%% -%% The available options are: -%%
-%%
port
Mandatory. TCP port number to open.
-%%
backlog
Maximum length of the pending connections queue. -%% Defaults to 1024.
-%%
ip
Interface to listen on. Listen on all interfaces -%% by default.
-%%
-%% -%% @see gen_tcp:listen/2 --spec listen([{port, inet:ip_port()} | {ip, inet:ip_address()}]) - -> {ok, inet:socket()} | {error, atom()}. -listen(Opts) -> - {port, Port} = lists:keyfind(port, 1, Opts), - Backlog = proplists:get_value(backlog, Opts, 1024), - ListenOpts0 = [binary, {active, false}, - {backlog, Backlog}, {packet, raw}, {reuseaddr, true}], - ListenOpts = - case lists:keyfind(ip, 1, Opts) of - false -> ListenOpts0; - Ip -> [Ip|ListenOpts0] - end, - gen_tcp:listen(Port, ListenOpts). - -%% @doc Accept an incoming connection on a listen socket. -%% @see gen_tcp:accept/2 --spec accept(inet:socket(), timeout()) - -> {ok, inet:socket()} | {error, closed | timeout | atom()}. -accept(LSocket, Timeout) -> - gen_tcp:accept(LSocket, Timeout). - -%% @doc Receive a packet from a socket in passive mode. -%% @see gen_tcp:recv/3 --spec recv(inet:socket(), non_neg_integer(), timeout()) - -> {ok, any()} | {error, closed | atom()}. -recv(Socket, Length, Timeout) -> - gen_tcp:recv(Socket, Length, Timeout). - -%% @doc Send a packet on a socket. -%% @see gen_tcp:send/2 --spec send(inet:socket(), iolist()) -> ok | {error, atom()}. -send(Socket, Packet) -> - gen_tcp:send(Socket, Packet). - -%% @doc Set one or more options for a socket. -%% @see inet:setopts/2 --spec setopts(inet:socket(), list()) -> ok | {error, atom()}. -setopts(Socket, Opts) -> - inet:setopts(Socket, Opts). - -%% @doc Assign a new controlling process Pid to Socket. -%% @see gen_tcp:controlling_process/2 --spec controlling_process(inet:socket(), pid()) - -> ok | {error, closed | not_owner | atom()}. -controlling_process(Socket, Pid) -> - gen_tcp:controlling_process(Socket, Pid). - -%% @doc Return the address and port for the other end of a connection. -%% @see inet:peername/1 --spec peername(inet:socket()) - -> {ok, {inet:ip_address(), inet:ip_port()}} | {error, atom()}. -peername(Socket) -> - inet:peername(Socket). - -%% @doc Close a TCP socket. -%% @see gen_tcp:close/1 --spec close(inet:socket()) -> ok. 
-close(Socket) -> - gen_tcp:close(Socket). diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/chunked_handler.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/chunked_handler.erl deleted file mode 100644 index d246d51..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/chunked_handler.erl +++ /dev/null @@ -1,17 +0,0 @@ -%% Feel free to use, reuse and abuse the code in this file. - --module(chunked_handler). --behaviour(cowboy_http_handler). --export([init/3, handle/2, terminate/2]). - -init({_Transport, http}, Req, _Opts) -> - {ok, Req, undefined}. - -handle(Req, State) -> - {ok, Req2} = cowboy_http_req:chunked_reply(200, Req), - cowboy_http_req:chunk("chunked_handler\r\n", Req2), - cowboy_http_req:chunk("works fine!", Req2), - {ok, Req2, State}. - -terminate(_Req, _State) -> - ok. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/dispatcher_prop.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/dispatcher_prop.erl deleted file mode 100644 index b6a1c92..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/dispatcher_prop.erl +++ /dev/null @@ -1,68 +0,0 @@ -%% Copyright (c) 2011, Magnus Klaar -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - --module(dispatcher_prop). --include_lib("proper/include/proper.hrl"). - -%% Generators. - -hostname_head_char() -> - oneof([choose($a, $z), choose($A, $Z), choose($0, $9)]). - -hostname_char() -> - oneof([choose($a, $z), choose($A, $Z), choose($0, $9), $-]). - -hostname_label() -> - ?SUCHTHAT(Label, [hostname_head_char()|list(hostname_char())], - length(Label) < 64). - -hostname() -> - ?SUCHTHAT(Hostname, - ?LET(Labels, list(hostname_label()), string:join(Labels, ".")), - length(Hostname) > 0 andalso length(Hostname) =< 255). - -port_number() -> - choose(1, 16#ffff). - -port_str() -> - oneof(["", ?LET(Port, port_number(), ":" ++ integer_to_list(Port))]). - -server() -> - ?LET({Hostname, PortStr}, {hostname(), port_str()}, - list_to_binary(Hostname ++ PortStr)). - -%% Properties. - -prop_split_host_symmetric() -> - ?FORALL(Server, server(), - begin case cowboy_dispatcher:split_host(Server) of - {Tokens, RawHost, undefined} -> - (Server == RawHost) and (Server == binary_join(Tokens, ".")); - {Tokens, RawHost, Port} -> - PortBin = (list_to_binary(":" ++ integer_to_list(Port))), - (Server == << RawHost/binary, PortBin/binary >>) - and (Server == << (binary_join(Tokens, "."))/binary, - PortBin/binary >>) - end end). - -%% Internal. - -%% Contributed by MononcQc on #erlounge. -binary_join(Flowers, Leaf) -> - case Flowers of - [] -> <<>>; - [Petal|Pot] -> iolist_to_binary( - [Petal | [[Leaf | Pollen] || Pollen <- Pot]]) - end. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE.erl deleted file mode 100644 index bad91a8..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE.erl +++ /dev/null @@ -1,613 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% Copyright (c) 2011, Anthony Ramine -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - --module(http_SUITE). - --include_lib("common_test/include/ct.hrl"). - --export([all/0, groups/0, init_per_suite/1, end_per_suite/1, - init_per_group/2, end_per_group/2]). %% ct. --export([chunked_response/1, headers_dupe/1, headers_huge/1, - keepalive_nl/1, max_keepalive/1, nc_rand/1, nc_zero/1, - pipeline/1, raw/1, set_resp_header/1, set_resp_overwrite/1, - set_resp_body/1, stream_body_set_resp/1, response_as_req/1, - static_mimetypes_function/1, static_attribute_etag/1, - static_function_etag/1, multipart/1]). %% http. --export([http_200/1, http_404/1, handler_errors/1, - file_200/1, file_403/1, dir_403/1, file_404/1, - file_400/1]). %% http and https. --export([http_10_hostless/1]). %% misc. --export([rest_simple/1, rest_keepalive/1, rest_keepalive_post/1]). %% rest. - -%% ct. 
- -all() -> - [{group, http}, {group, https}, {group, misc}, {group, rest}]. - -groups() -> - BaseTests = [http_200, http_404, handler_errors, - file_200, file_403, dir_403, file_404, file_400], - [{http, [], [chunked_response, headers_dupe, headers_huge, - keepalive_nl, max_keepalive, nc_rand, nc_zero, pipeline, raw, - set_resp_header, set_resp_overwrite, - set_resp_body, response_as_req, stream_body_set_resp, - static_mimetypes_function, static_attribute_etag, - static_function_etag, multipart] ++ BaseTests}, - {https, [], BaseTests}, - {misc, [], [http_10_hostless]}, - {rest, [], [rest_simple, rest_keepalive, rest_keepalive_post]}]. - -init_per_suite(Config) -> - application:start(inets), - application:start(cowboy), - Config. - -end_per_suite(_Config) -> - application:stop(cowboy), - application:stop(inets), - ok. - -init_per_group(http, Config) -> - Port = 33080, - Config1 = init_static_dir(Config), - cowboy:start_listener(http, 100, - cowboy_tcp_transport, [{port, Port}], - cowboy_http_protocol, [{max_keepalive, 50}, - {dispatch, init_http_dispatch(Config1)}] - ), - [{scheme, "http"}, {port, Port}|Config1]; -init_per_group(https, Config) -> - Port = 33081, - Config1 = init_static_dir(Config), - application:start(crypto), - application:start(public_key), - application:start(ssl), - DataDir = ?config(data_dir, Config), - cowboy:start_listener(https, 100, - cowboy_ssl_transport, [ - {port, Port}, {certfile, DataDir ++ "cert.pem"}, - {keyfile, DataDir ++ "key.pem"}, {password, "cowboy"}], - cowboy_http_protocol, [{dispatch, init_https_dispatch(Config1)}] - ), - [{scheme, "https"}, {port, Port}|Config1]; -init_per_group(misc, Config) -> - Port = 33082, - cowboy:start_listener(misc, 100, - cowboy_tcp_transport, [{port, Port}], - cowboy_http_protocol, [{dispatch, [{'_', [ - {[], http_handler, []} - ]}]}]), - [{port, Port}|Config]; -init_per_group(rest, Config) -> - Port = 33083, - cowboy:start_listener(reset, 100, - cowboy_tcp_transport, [{port, Port}], - 
cowboy_http_protocol, [{dispatch, [{'_', [ - {[<<"simple">>], rest_simple_resource, []}, - {[<<"forbidden_post">>], rest_forbidden_resource, [true]}, - {[<<"simple_post">>], rest_forbidden_resource, [false]} - ]}]}]), - [{port, Port}|Config]. - -end_per_group(https, Config) -> - cowboy:stop_listener(https), - application:stop(ssl), - application:stop(public_key), - application:stop(crypto), - end_static_dir(Config), - ok; -end_per_group(http, Config) -> - cowboy:stop_listener(http), - end_static_dir(Config); -end_per_group(Listener, _Config) -> - cowboy:stop_listener(Listener), - ok. - -%% Dispatch configuration. - -init_http_dispatch(Config) -> - [ - {[<<"localhost">>], [ - {[<<"chunked_response">>], chunked_handler, []}, - {[<<"init_shutdown">>], http_handler_init_shutdown, []}, - {[<<"long_polling">>], http_handler_long_polling, []}, - {[<<"headers">>, <<"dupe">>], http_handler, - [{headers, [{<<"Connection">>, <<"close">>}]}]}, - {[<<"set_resp">>, <<"header">>], http_handler_set_resp, - [{headers, [{<<"Vary">>, <<"Accept">>}]}]}, - {[<<"set_resp">>, <<"overwrite">>], http_handler_set_resp, - [{headers, [{<<"Server">>, <<"DesireDrive/1.0">>}]}]}, - {[<<"set_resp">>, <<"body">>], http_handler_set_resp, - [{body, <<"A flameless dance does not equal a cycle">>}]}, - {[<<"stream_body">>, <<"set_resp">>], http_handler_stream_body, - [{reply, set_resp}, {body, <<"stream_body_set_resp">>}]}, - {[<<"static">>, '...'], cowboy_http_static, - [{directory, ?config(static_dir, Config)}, - {mimetypes, [{<<".css">>, [<<"text/css">>]}]}]}, - {[<<"static_mimetypes_function">>, '...'], cowboy_http_static, - [{directory, ?config(static_dir, Config)}, - {mimetypes, {fun(Path, data) when is_binary(Path) -> - [<<"text/html">>] end, data}}]}, - {[<<"handler_errors">>], http_handler_errors, []}, - {[<<"static_attribute_etag">>, '...'], cowboy_http_static, - [{directory, ?config(static_dir, Config)}, - {etag, {attributes, [filepath, filesize, inode, mtime]}}]}, - 
{[<<"static_function_etag">>, '...'], cowboy_http_static, - [{directory, ?config(static_dir, Config)}, - {etag, {fun static_function_etag/2, etag_data}}]}, - {[<<"multipart">>], http_handler_multipart, []}, - {[], http_handler, []} - ]} - ]. - -init_https_dispatch(Config) -> - init_http_dispatch(Config). - - -init_static_dir(Config) -> - Dir = filename:join(?config(priv_dir, Config), "static"), - Level1 = fun(Name) -> filename:join(Dir, Name) end, - ok = file:make_dir(Dir), - ok = file:write_file(Level1("test_file"), "test_file\n"), - ok = file:write_file(Level1("test_file.css"), "test_file.css\n"), - ok = file:write_file(Level1("test_noread"), "test_noread\n"), - ok = file:change_mode(Level1("test_noread"), 8#0333), - ok = file:write_file(Level1("test.html"), "test.html\n"), - ok = file:make_dir(Level1("test_dir")), - [{static_dir, Dir}|Config]. - -end_static_dir(Config) -> - Dir = ?config(static_dir, Config), - Level1 = fun(Name) -> filename:join(Dir, Name) end, - ok = file:delete(Level1("test_file")), - ok = file:delete(Level1("test_file.css")), - ok = file:delete(Level1("test_noread")), - ok = file:delete(Level1("test.html")), - ok = file:del_dir(Level1("test_dir")), - ok = file:del_dir(Dir), - Config. - -%% http. - -chunked_response(Config) -> - {ok, {{"HTTP/1.1", 200, "OK"}, _Headers, "chunked_handler\r\nworks fine!"}} = - httpc:request(build_url("/chunked_response", Config)). - -headers_dupe(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, "GET /headers/dupe HTTP/1.1\r\n" - "Host: localhost\r\nConnection: keep-alive\r\n\r\n"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {_Start, _Length} = binary:match(Data, <<"Connection: close">>), - nomatch = binary:match(Data, <<"Connection: keep-alive">>), - {error, closed} = gen_tcp:recv(Socket, 0, 1000). 
- -headers_huge(Config) -> - Cookie = lists:flatten(["whatever_man_biiiiiiiiiiiig_cookie_me_want_77=" - "Wed Apr 06 2011 10:38:52 GMT-0500 (CDT)" || _N <- lists:seq(1, 40)]), - {_Packet, 200} = raw_req(["GET / HTTP/1.0\r\nHost: localhost\r\n" - "Set-Cookie: ", Cookie, "\r\n\r\n"], Config). - -keepalive_nl(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = keepalive_nl_loop(Socket, 10), - ok = gen_tcp:close(Socket). - -keepalive_nl_loop(_Socket, 0) -> - ok; -keepalive_nl_loop(Socket, N) -> - ok = gen_tcp:send(Socket, "GET / HTTP/1.1\r\n" - "Host: localhost\r\nConnection: keep-alive\r\n\r\n"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {0, 12} = binary:match(Data, <<"HTTP/1.1 200">>), - nomatch = binary:match(Data, <<"Connection: close">>), - ok = gen_tcp:send(Socket, "\r\n"), %% extra nl - keepalive_nl_loop(Socket, N - 1). - -max_keepalive(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = max_keepalive_loop(Socket, 50), - {error, closed} = gen_tcp:recv(Socket, 0, 1000). - -max_keepalive_loop(_Socket, 0) -> - ok; -max_keepalive_loop(Socket, N) -> - ok = gen_tcp:send(Socket, "GET / HTTP/1.1\r\n" - "Host: localhost\r\nConnection: keep-alive\r\n\r\n"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {0, 12} = binary:match(Data, <<"HTTP/1.1 200">>), - case N of - 1 -> {_, _} = binary:match(Data, <<"Connection: close">>); - N -> nomatch = binary:match(Data, <<"Connection: close">>) - end, - keepalive_nl_loop(Socket, N - 1). - -multipart(Config) -> - Url = build_url("/multipart", Config), - Body = << - "This is a preamble." - "\r\n--OHai\r\nX-Name:answer\r\n\r\n42" - "\r\n--OHai\r\nServer:Cowboy\r\n\r\nIt rocks!\r\n" - "\r\n--OHai--" - "This is an epiloque." 
- >>, - Request = {Url, [], "multipart/x-makes-no-sense; boundary=OHai", Body}, - {ok, {{"HTTP/1.1", 200, "OK"}, _Headers, Response}} = - httpc:request(post, Request, [], [{body_format, binary}]), - Parts = binary_to_term(Response), - Parts = [ - {[{<<"X-Name">>, <<"answer">>}], <<"42">>}, - {[{'Server', <<"Cowboy">>}], <<"It rocks!\r\n">>} - ]. - -nc_rand(Config) -> - nc_reqs(Config, "/dev/urandom"). - -nc_zero(Config) -> - nc_reqs(Config, "/dev/zero"). - -nc_reqs(Config, Input) -> - Cat = os:find_executable("cat"), - Nc = os:find_executable("nc"), - case {Cat, Nc} of - {false, _} -> - {skip, {notfound, cat}}; - {_, false} -> - {skip, {notfound, nc}}; - _Good -> - %% Throw garbage at the server then check if it's still up. - {port, Port} = lists:keyfind(port, 1, Config), - [nc_run_req(Port, Input) || _N <- lists:seq(1, 100)], - Packet = "GET / HTTP/1.0\r\nHost: localhost\r\n\r\n", - {Packet, 200} = raw_req(Packet, Config) - end. - -nc_run_req(Port, Input) -> - os:cmd("cat " ++ Input ++ " | nc localhost " ++ integer_to_list(Port)). - -pipeline(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n" - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n" - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n" - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: keep-alive\r\n\r\n" - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n"), - Data = pipeline_recv(Socket, <<>>), - Reqs = binary:split(Data, << "\r\n\r\nhttp_handler" >>, [global, trim]), - 5 = length(Reqs), - pipeline_check(Reqs). - -pipeline_check([]) -> - ok; -pipeline_check([Req|Tail]) -> - << "HTTP/1.1 200", _Rest/bits >> = Req, - pipeline_check(Tail). 
- -pipeline_recv(Socket, SoFar) -> - case gen_tcp:recv(Socket, 0, 6000) of - {ok, Data} -> - pipeline_recv(Socket, << SoFar/binary, Data/binary >>); - {error, closed} -> - ok = gen_tcp:close(Socket), - SoFar - end. - -raw_req(Packet, Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, Packet), - Res = case gen_tcp:recv(Socket, 0, 6000) of - {ok, << "HTTP/1.1 ", Str:24/bits, _Rest/bits >>} -> - list_to_integer(binary_to_list(Str)); - {error, Reason} -> - Reason - end, - gen_tcp:close(Socket), - {Packet, Res}. - -%% Send a raw request. Return the response code and the full response. -raw_resp(Request, Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - Transport = case ?config(scheme, Config) of - "http" -> gen_tcp; - "https" -> ssl - end, - {ok, Socket} = Transport:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = Transport:send(Socket, Request), - {StatusCode, Response} = case recv_loop(Transport, Socket, <<>>) of - {ok, << "HTTP/1.1 ", Str:24/bits, _Rest/bits >> = Bin} -> - {list_to_integer(binary_to_list(Str)), Bin}; - {ok, Bin} -> - {badresp, Bin}; - {error, Reason} -> - {Reason, <<>>} - end, - Transport:close(Socket), - {Response, StatusCode}. - -recv_loop(Transport, Socket, Acc) -> - case Transport:recv(Socket, 0, 6000) of - {ok, Data} -> - recv_loop(Transport, Socket, <>); - {error, closed} -> - ok = Transport:close(Socket), - {ok, Acc}; - {error, Reason} -> - {error, Reason} - end. 
- - - -raw(Config) -> - Huge = [$0 || _N <- lists:seq(1, 5000)], - Tests = [ - {"\r\n\r\n\r\n\r\n\r\nGET / HTTP/1.1\r\nHost: localhost\r\n\r\n", 200}, - {"\n", 400}, - {"Garbage\r\n\r\n", 400}, - {"\r\n\r\n\r\n\r\n\r\n\r\n", 400}, - {"GET / HTTP/1.1\r\nHost: dev-extend.eu\r\n\r\n", 400}, - {"", closed}, - {"\r\n", closed}, - {"\r\n\r\n", closed}, - {"GET / HTTP/1.1", closed}, - {"GET / HTTP/1.1\r\n", 408}, - {"GET / HTTP/1.1\r\nHost: localhost", 408}, - {"GET / HTTP/1.1\r\nHost: localhost\r\n", 408}, - {"GET / HTTP/1.1\r\nHost: localhost\r\n\r", 408}, - {"GET http://localhost/ HTTP/1.1\r\n\r\n", 501}, - {"GET / HTTP/1.2\r\nHost: localhost\r\n\r\n", 505}, - {"GET /init_shutdown HTTP/1.1\r\nHost: localhost\r\n\r\n", 666}, - {"GET /long_polling HTTP/1.1\r\nHost: localhost\r\n\r\n", 102}, - {Huge, 413}, - {"GET / HTTP/1.1\r\n" ++ Huge, 413} - ], - [{Packet, StatusCode} = raw_req(Packet, Config) - || {Packet, StatusCode} <- Tests]. - -set_resp_header(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, "GET /set_resp/header HTTP/1.1\r\n" - "Host: localhost\r\nConnection: close\r\n\r\n"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {_, _} = binary:match(Data, <<"Vary: Accept">>), - {_, _} = binary:match(Data, <<"Set-Cookie: ">>). - -set_resp_overwrite(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, "GET /set_resp/overwrite HTTP/1.1\r\n" - "Host: localhost\r\nConnection: close\r\n\r\n"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {_Start, _Length} = binary:match(Data, <<"Server: DesireDrive/1.0">>). 
- -set_resp_body(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, "GET /set_resp/body HTTP/1.1\r\n" - "Host: localhost\r\nConnection: close\r\n\r\n"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {_Start, _Length} = binary:match(Data, <<"\r\n\r\n" - "A flameless dance does not equal a cycle">>). - -response_as_req(Config) -> - Packet = -"HTTP/1.0 302 Found -Location: http://www.google.co.il/ -Cache-Control: private -Content-Type: text/html; charset=UTF-8 -Set-Cookie: PREF=ID=568f67013d4a7afa:FF=0:TM=1323014101:LM=1323014101:S=XqctDWC65MzKT0zC; expires=Tue, 03-Dec-2013 15:55:01 GMT; path=/; domain=.google.com -Date: Sun, 04 Dec 2011 15:55:01 GMT -Server: gws -Content-Length: 221 -X-XSS-Protection: 1; mode=block -X-Frame-Options: SAMEORIGIN - - -302 Moved -

302 Moved

-The document has moved -here. -", - {Packet, 400} = raw_req(Packet, Config). - -stream_body_set_resp(Config) -> - {Packet, 200} = raw_resp( - "GET /stream_body/set_resp HTTP/1.1\r\n" - "Host: localhost\r\nConnection: close\r\n\r\n", Config), - {_Start, _Length} = binary:match(Packet, <<"stream_body_set_resp">>). - -static_mimetypes_function(Config) -> - TestURL = build_url("/static_mimetypes_function/test.html", Config), - {ok, {{"HTTP/1.1", 200, "OK"}, Headers1, "test.html\n"}} = - httpc:request(TestURL), - "text/html" = ?config("content-type", Headers1). - -handler_errors(Config) -> - Request = fun(Case) -> - raw_resp(["GET /handler_errors?case=", Case, " HTTP/1.1\r\n", - "Host: localhost\r\n\r\n"], Config) end, - - {_Packet1, 500} = Request("init_before_reply"), - - {Packet2, 200} = Request("init_after_reply"), - nomatch = binary:match(Packet2, <<"HTTP/1.1 500">>), - - {Packet3, 200} = Request("init_reply_handle_error"), - nomatch = binary:match(Packet3, <<"HTTP/1.1 500">>), - - {_Packet4, 500} = Request("handle_before_reply"), - - {Packet5, 200} = Request("handle_after_reply"), - nomatch = binary:match(Packet5, <<"HTTP/1.1 500">>), - - {Packet6, 200} = raw_resp([ - "GET / HTTP/1.1\r\n", - "Host: localhost\r\n", - "Connection: keep-alive\r\n\r\n", - "GET /handler_errors?case=handle_after_reply\r\n", - "Host: localhost\r\n\r\n"], Config), - nomatch = binary:match(Packet6, <<"HTTP/1.1 500">>), - - {Packet7, 200} = raw_resp([ - "GET / HTTP/1.1\r\n", - "Host: localhost\r\n", - "Connection: keep-alive\r\n\r\n", - "GET /handler_errors?case=handle_before_reply HTTP/1.1\r\n", - "Host: localhost\r\n\r\n"], Config), - {{_, _}, _} = {binary:match(Packet7, <<"HTTP/1.1 500">>), Packet7}, - - done. 
- -static_attribute_etag(Config) -> - TestURL = build_url("/static_attribute_etag/test.html", Config), - {ok, {{"HTTP/1.1", 200, "OK"}, Headers1, "test.html\n"}} = - httpc:request(TestURL), - false = ?config("etag", Headers1) =:= undefined, - {ok, {{"HTTP/1.1", 200, "OK"}, Headers2, "test.html\n"}} = - httpc:request(TestURL), - true = ?config("etag", Headers1) =:= ?config("etag", Headers2). - -static_function_etag(Config) -> - TestURL = build_url("/static_function_etag/test.html", Config), - {ok, {{"HTTP/1.1", 200, "OK"}, Headers1, "test.html\n"}} = - httpc:request(TestURL), - false = ?config("etag", Headers1) =:= undefined, - {ok, {{"HTTP/1.1", 200, "OK"}, Headers2, "test.html\n"}} = - httpc:request(TestURL), - true = ?config("etag", Headers1) =:= ?config("etag", Headers2). - -static_function_etag(Arguments, etag_data) -> - {_, Filepath} = lists:keyfind(filepath, 1, Arguments), - {_, _Filesize} = lists:keyfind(filesize, 1, Arguments), - {_, _INode} = lists:keyfind(inode, 1, Arguments), - {_, _Modified} = lists:keyfind(mtime, 1, Arguments), - ChecksumCommand = lists:flatten(io_lib:format("sha1sum ~s", [Filepath])), - [Checksum|_] = string:tokens(os:cmd(ChecksumCommand), " "), - iolist_to_binary(Checksum). - -%% http and https. - -build_url(Path, Config) -> - {scheme, Scheme} = lists:keyfind(scheme, 1, Config), - {port, Port} = lists:keyfind(port, 1, Config), - Scheme ++ "://localhost:" ++ integer_to_list(Port) ++ Path. - -http_200(Config) -> - {ok, {{"HTTP/1.1", 200, "OK"}, _Headers, "http_handler"}} = - httpc:request(build_url("/", Config)). - -http_404(Config) -> - {ok, {{"HTTP/1.1", 404, "Not Found"}, _Headers, _Body}} = - httpc:request(build_url("/not/found", Config)). 
- -file_200(Config) -> - {ok, {{"HTTP/1.1", 200, "OK"}, Headers, "test_file\n"}} = - httpc:request(build_url("/static/test_file", Config)), - "application/octet-stream" = ?config("content-type", Headers), - - {ok, {{"HTTP/1.1", 200, "OK"}, Headers1, "test_file.css\n"}} = - httpc:request(build_url("/static/test_file.css", Config)), - "text/css" = ?config("content-type", Headers1). - -file_403(Config) -> - {ok, {{"HTTP/1.1", 403, "Forbidden"}, _Headers, _Body}} = - httpc:request(build_url("/static/test_noread", Config)). - -dir_403(Config) -> - {ok, {{"HTTP/1.1", 403, "Forbidden"}, _Headers, _Body}} = - httpc:request(build_url("/static/test_dir", Config)), - {ok, {{"HTTP/1.1", 403, "Forbidden"}, _Headers, _Body}} = - httpc:request(build_url("/static/test_dir/", Config)). - -file_404(Config) -> - {ok, {{"HTTP/1.1", 404, "Not Found"}, _Headers, _Body}} = - httpc:request(build_url("/static/not_found", Config)). - -file_400(Config) -> - {ok, {{"HTTP/1.1", 400, "Bad Request"}, _Headers, _Body}} = - httpc:request(build_url("/static/%2f", Config)), - {ok, {{"HTTP/1.1", 400, "Bad Request"}, _Headers1, _Body1}} = - httpc:request(build_url("/static/%2e", Config)), - {ok, {{"HTTP/1.1", 400, "Bad Request"}, _Headers2, _Body2}} = - httpc:request(build_url("/static/%2e%2e", Config)). -%% misc. - -http_10_hostless(Config) -> - Packet = "GET / HTTP/1.0\r\n\r\n", - {Packet, 200} = raw_req(Packet, Config). - -%% rest. - -rest_simple(Config) -> - Packet = "GET /simple HTTP/1.1\r\nHost: localhost\r\n\r\n", - {Packet, 200} = raw_req(Packet, Config). - -rest_keepalive(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = rest_keepalive_loop(Socket, 100), - ok = gen_tcp:close(Socket). 
- -rest_keepalive_loop(_Socket, 0) -> - ok; -rest_keepalive_loop(Socket, N) -> - ok = gen_tcp:send(Socket, "GET /simple HTTP/1.1\r\n" - "Host: localhost\r\nConnection: keep-alive\r\n\r\n"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {0, 12} = binary:match(Data, <<"HTTP/1.1 200">>), - nomatch = binary:match(Data, <<"Connection: close">>), - rest_keepalive_loop(Socket, N - 1). - -rest_keepalive_post(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = rest_keepalive_post_loop(Socket, 10, forbidden_post), - ok = gen_tcp:close(Socket). - -rest_keepalive_post_loop(_Socket, 0, _) -> - ok; -rest_keepalive_post_loop(Socket, N, simple_post) -> - ok = gen_tcp:send(Socket, "POST /simple_post HTTP/1.1\r\n" - "Host: localhost\r\nConnection: keep-alive\r\n" - "Content-Length: 5\r\nContent-Type: text/plain\r\n\r\n12345"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {0, 12} = binary:match(Data, <<"HTTP/1.1 303">>), - nomatch = binary:match(Data, <<"Connection: close">>), - rest_keepalive_post_loop(Socket, N - 1, forbidden_post); -rest_keepalive_post_loop(Socket, N, forbidden_post) -> - ok = gen_tcp:send(Socket, "POST /forbidden_post HTTP/1.1\r\n" - "Host: localhost\r\nConnection: keep-alive\r\n" - "Content-Length: 5\r\nContent-Type: text/plain\r\n\r\n12345"), - {ok, Data} = gen_tcp:recv(Socket, 0, 6000), - {0, 12} = binary:match(Data, <<"HTTP/1.1 403">>), - nomatch = binary:match(Data, <<"Connection: close">>), - rest_keepalive_post_loop(Socket, N - 1, simple_post). 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/cert.pem b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/cert.pem deleted file mode 100644 index a772007..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/cert.pem +++ /dev/null @@ -1,14 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICKTCCAZICCQCl9gdHk5NqUjANBgkqhkiG9w0BAQUFADBZMQswCQYDVQQGEwJB -VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0 -cyBQdHkgTHRkMRIwEAYDVQQDDAlsb2NhbGhvc3QwHhcNMTEwNDA4MTMxNTE3WhcN -MTEwNTA4MTMxNTE3WjBZMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0 -ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRIwEAYDVQQDDAls -b2NhbGhvc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAOjgFPS0dP4d8F1e -bNJPB+kAjM2FyTZGmkFCLUYONTPrdGOUIHL/UOGtU22BQzlskE+a6/j2Kg72tm8x -4X7yf+6s7CdRe086idNx9+GymZ64ZTnly33rD3AJffbBeWHwT2e9fuBeFk9WGC8v -kqECFZyqf7+znS0o48oBNcx3ePB5AgMBAAEwDQYJKoZIhvcNAQEFBQADgYEASTkv -oHuZyO8DgT8bIE6W3yM2fvlNshkhh7Thgpf32qQoVOxRU9EF0KpuJCCAHQHQNQlI -nf9Zc4UzOrLhxZBGocNhkkn4WLw2ysto/7+/+9xHah0M0l4auHLQagVLCoOsHUn2 -JX+A2NrbvuX5wnUrZGOdgY70tvMBeU/xLtp3af8= ------END CERTIFICATE----- diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/key.pem b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/key.pem deleted file mode 100644 index 0b699cc..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_SUITE_data/key.pem +++ /dev/null @@ -1,18 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-EDE3-CBC,F11262DB77BB804C - -jOJ+ft/dihIxz7CTuuK47fCTGdX7xMLANmA7mRg8y9OYhNZQiCz5GjcWLqe0NNl5 -qXPW0uvT/9B5O9o21Y2i/CKU1BqRLuXHXDsjHg7RGaSH6wIavWt+lR+I1sjieFbX -VByK1KHXjEU704DEILKJIA9gVzoYAgMzo+FTw2e/2jusXntxk8HXyF5zKTzjHBtI -NQGweJqTmfZjX3SgPP4Co/ShrA6fUG0uTp1HwbByJnwtAeT3xWJrAD4QSn7+qrlv -3qmEIqVXsvLrfZRY1WZ4uIsbLK8wkvxboSIoIK55VV9R2zRbwQULon6QJwKYujAr 
-J2WUYkHHQOMpaAzUmalaT+8GUt8/A1oSK4BdiSZywsMMm46/hDadXBzFg+dPL5g2 -Td+7/L0S6tUVWq4+YBp5EalZH6VQ4cqPYDJZUZ9xt6+yY7V5748lSdA7cHCROnbG -bKbSW9WbF7MPDHCjvCAfq+s1dafHJgyIOlMg2bm7V8eHWAA0xKQ/o7i5EyEyaKYR -UXGeAf+KfXcclEZ77v2RCXZvd6ceWkifm59qWv/3TCYaHiS2Aa3lVToMKTwYzzXQ -p5X5os6wv3IAi2nGyAIOoSDisdHmFteZNXNQsw0n3XCAYfsNMk+r5/r5YqDffURH -c8SMOCP4BIPoZ/abi/gnEntGqsx1YALg0aosHwHGDJ/l+QJC6u6PZk310YzRw4GL -K9+wscFgEub2OO+R83Vkfesj4tYzgOjab7+92a/soHdW0zhGejlvehODOgNZ6NUG -MPQlT+qpF9Jh5IThYXupXXFzJzQe3O/qVXy89m69JGa+AWRvbu+M/A== ------END RSA PRIVATE KEY----- diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_init_shutdown.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_init_shutdown.erl deleted file mode 100644 index ac63b44..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_init_shutdown.erl +++ /dev/null @@ -1,17 +0,0 @@ -%% Feel free to use, reuse and abuse the code in this file. - --module(http_handler_init_shutdown). --behaviour(cowboy_http_handler). --export([init/3, handle/2, terminate/2]). - -init({_Transport, http}, Req, _Opts) -> - {ok, Req2} = cowboy_http_req:reply(<<"666 Init Shutdown Testing">>, - [{'Connection', <<"close">>}], Req), - {shutdown, Req2, undefined}. - -handle(Req, State) -> - {ok, Req2} = cowboy_http_req:reply(200, [], "Hello world!", Req), - {ok, Req2, State}. - -terminate(_Req, _State) -> - ok. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_long_polling.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_long_polling.erl deleted file mode 100644 index e838619..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_long_polling.erl +++ /dev/null @@ -1,22 +0,0 @@ -%% Feel free to use, reuse and abuse the code in this file. - --module(http_handler_long_polling). --behaviour(cowboy_http_handler). --export([init/3, handle/2, info/3, terminate/2]). 
- -init({_Transport, http}, Req, _Opts) -> - erlang:send_after(500, self(), timeout), - {loop, Req, 9, 5000, hibernate}. - -handle(_Req, _State) -> - exit(badarg). - -info(timeout, Req, 0) -> - {ok, Req2} = cowboy_http_req:reply(102, Req), - {ok, Req2, 0}; -info(timeout, Req, State) -> - erlang:send_after(500, self(), timeout), - {loop, Req, State - 1, hibernate}. - -terminate(_Req, _State) -> - ok. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_multipart.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_multipart.erl deleted file mode 100644 index f5f7919..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_multipart.erl +++ /dev/null @@ -1,29 +0,0 @@ -%% Feel free to use, reuse and abuse the code in this file. - --module(http_handler_multipart). --behaviour(cowboy_http_handler). --export([init/3, handle/2, terminate/2]). - -init({_Transport, http}, Req, []) -> - {ok, Req, {}}. - -handle(Req, State) -> - {Result, Req2} = acc_multipart(Req, []), - {ok, Req3} = cowboy_http_req:reply(200, [], term_to_binary(Result), Req2), - {ok, Req3, State}. - -terminate(_Req, _State) -> - ok. - -acc_multipart(Req, Acc) -> - {Result, Req2} = cowboy_http_req:multipart_data(Req), - acc_multipart(Req2, Acc, Result). - -acc_multipart(Req, Acc, {headers, Headers}) -> - acc_multipart(Req, [{Headers, []}|Acc]); -acc_multipart(Req, [{Headers, BodyAcc}|Acc], {body, Data}) -> - acc_multipart(Req, [{Headers, [Data|BodyAcc]}|Acc]); -acc_multipart(Req, [{Headers, BodyAcc}|Acc], end_of_part) -> - acc_multipart(Req, [{Headers, list_to_binary(lists:reverse(BodyAcc))}|Acc]); -acc_multipart(Req, Acc, eof) -> - {lists:reverse(Acc), Req}. 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_set_resp.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_set_resp.erl deleted file mode 100644 index 83d48c0..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_set_resp.erl +++ /dev/null @@ -1,33 +0,0 @@ -%% Feel free to use, reuse and abuse the code in this file. - --module(http_handler_set_resp). --behaviour(cowboy_http_handler). --export([init/3, handle/2, terminate/2]). - -init({_Transport, http}, Req, Opts) -> - Headers = proplists:get_value(headers, Opts, []), - Body = proplists:get_value(body, Opts, <<"http_handler_set_resp">>), - {ok, Req2} = lists:foldl(fun({Name, Value}, {ok, R}) -> - cowboy_http_req:set_resp_header(Name, Value, R) - end, {ok, Req}, Headers), - {ok, Req3} = cowboy_http_req:set_resp_body(Body, Req2), - {ok, Req4} = cowboy_http_req:set_resp_header( - <<"X-Cowboy-Test">>, <<"ok">>, Req3), - {ok, Req5} = cowboy_http_req:set_resp_cookie( - <<"cake">>, <<"lie">>, [], Req4), - {ok, Req5, undefined}. - -handle(Req, State) -> - case cowboy_http_req:has_resp_header(<<"X-Cowboy-Test">>, Req) of - false -> {ok, Req, State}; - true -> - case cowboy_http_req:has_resp_body(Req) of - false -> {ok, Req, State}; - true -> - {ok, Req2} = cowboy_http_req:reply(200, Req), - {ok, Req2, State} - end - end. - -terminate(_Req, _State) -> - ok. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_stream_body.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_stream_body.erl deleted file mode 100644 index c90f746..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/http_handler_stream_body.erl +++ /dev/null @@ -1,24 +0,0 @@ -%% Feel free to use, reuse and abuse the code in this file. - --module(http_handler_stream_body). --behaviour(cowboy_http_handler). --export([init/3, handle/2, terminate/2]). - --record(state, {headers, body, reply}). 
- -init({_Transport, http}, Req, Opts) -> - Headers = proplists:get_value(headers, Opts, []), - Body = proplists:get_value(body, Opts, "http_handler_stream_body"), - Reply = proplists:get_value(reply, Opts), - {ok, Req, #state{headers=Headers, body=Body, reply=Reply}}. - -handle(Req, State=#state{headers=_Headers, body=Body, reply=set_resp}) -> - {ok, Transport, Socket} = cowboy_http_req:transport(Req), - SFun = fun() -> Transport:send(Socket, Body), sent end, - SLen = iolist_size(Body), - {ok, Req2} = cowboy_http_req:set_resp_body_fun(SLen, SFun, Req), - {ok, Req3} = cowboy_http_req:reply(200, Req2), - {ok, Req3, State}. - -terminate(_Req, _State) -> - ok. diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_SUITE.erl b/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_SUITE.erl deleted file mode 100644 index 136833f..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/cowboy-git/test/ws_SUITE.erl +++ /dev/null @@ -1,318 +0,0 @@ -%% Copyright (c) 2011, Loïc Hoguin -%% -%% Permission to use, copy, modify, and/or distribute this software for any -%% purpose with or without fee is hereby granted, provided that the above -%% copyright notice and this permission notice appear in all copies. -%% -%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - --module(ws_SUITE). - --include_lib("common_test/include/ct.hrl"). - --export([all/0, groups/0, init_per_suite/1, end_per_suite/1, - init_per_group/2, end_per_group/2]). %% ct. 
--export([ws0/1, ws8/1, ws8_single_bytes/1, ws8_init_shutdown/1, - ws13/1, ws_timeout_hibernate/1]). %% ws. - -%% ct. - -all() -> - [{group, ws}]. - -groups() -> - BaseTests = [ws0, ws8, ws8_single_bytes, ws8_init_shutdown, ws13, - ws_timeout_hibernate], - [{ws, [], BaseTests}]. - -init_per_suite(Config) -> - application:start(inets), - application:start(cowboy), - Config. - -end_per_suite(_Config) -> - application:stop(cowboy), - application:stop(inets), - ok. - -init_per_group(ws, Config) -> - Port = 33080, - cowboy:start_listener(ws, 100, - cowboy_tcp_transport, [{port, Port}], - cowboy_http_protocol, [{dispatch, init_dispatch()}] - ), - [{port, Port}|Config]. - -end_per_group(Listener, _Config) -> - cowboy:stop_listener(Listener), - ok. - -%% Dispatch configuration. - -init_dispatch() -> - [ - {[<<"localhost">>], [ - {[<<"websocket">>], websocket_handler, []}, - {[<<"ws_timeout_hibernate">>], ws_timeout_hibernate_handler, []}, - {[<<"ws_init_shutdown">>], websocket_handler_init_shutdown, []} - ]} - ]. - -%% ws and wss. - -%% This test makes sure the code works even if we wait for a reply -%% before sending the third challenge key in the GET body. -%% -%% This ensures that Cowboy will work fine with proxies on hixie. 
-ws0(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, - "GET /websocket HTTP/1.1\r\n" - "Host: localhost\r\n" - "Connection: Upgrade\r\n" - "Upgrade: WebSocket\r\n" - "Origin: http://localhost\r\n" - "Sec-Websocket-Key1: Y\" 4 1Lj!957b8@0H756!i\r\n" - "Sec-Websocket-Key2: 1711 M;4\\74 80<6\r\n" - "\r\n"), - {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), - {ok, {http_response, {1, 1}, 101, "WebSocket Protocol Handshake"}, Rest} - = erlang:decode_packet(http, Handshake, []), - [Headers, <<>>] = websocket_headers( - erlang:decode_packet(httph, Rest, []), []), - {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), - {'Upgrade', "WebSocket"} = lists:keyfind('Upgrade', 1, Headers), - {"sec-websocket-location", "ws://localhost/websocket"} - = lists:keyfind("sec-websocket-location", 1, Headers), - {"sec-websocket-origin", "http://localhost"} - = lists:keyfind("sec-websocket-origin", 1, Headers), - ok = gen_tcp:send(Socket, <<15,245,8,18,2,204,133,33>>), - {ok, Body} = gen_tcp:recv(Socket, 0, 6000), - <<169,244,191,103,146,33,149,59,74,104,67,5,99,118,171,236>> = Body, - ok = gen_tcp:send(Socket, << 0, "client_msg", 255 >>), - {ok, << 0, "client_msg", 255 >>} = gen_tcp:recv(Socket, 0, 6000), - {ok, << 0, "websocket_init", 255 >>} = gen_tcp:recv(Socket, 0, 6000), - {ok, << 0, "websocket_handle", 255 >>} = gen_tcp:recv(Socket, 0, 6000), - {ok, << 0, "websocket_handle", 255 >>} = gen_tcp:recv(Socket, 0, 6000), - {ok, << 0, "websocket_handle", 255 >>} = gen_tcp:recv(Socket, 0, 6000), - %% We try to send another HTTP request to make sure - %% the server closed the request. - ok = gen_tcp:send(Socket, [ - << 255, 0 >>, %% Close websocket command. - "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n" %% Server should ignore it. 
- ]), - {ok, << 255, 0 >>} = gen_tcp:recv(Socket, 0, 6000), - {error, closed} = gen_tcp:recv(Socket, 0, 6000), - ok. - -ws8(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, [ - "GET /websocket HTTP/1.1\r\n" - "Host: localhost\r\n" - "Connection: Upgrade\r\n" - "Upgrade: websocket\r\n" - "Sec-WebSocket-Origin: http://localhost\r\n" - "Sec-WebSocket-Version: 8\r\n" - "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" - "\r\n"]), - {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), - {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} - = erlang:decode_packet(http, Handshake, []), - [Headers, <<>>] = websocket_headers( - erlang:decode_packet(httph, Rest, []), []), - {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), - {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), - {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} - = lists:keyfind("sec-websocket-accept", 1, Headers), - ok = gen_tcp:send(Socket, << 16#81, 16#85, 16#37, 16#fa, 16#21, 16#3d, - 16#7f, 16#9f, 16#4d, 16#51, 16#58 >>), - {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 0, 6000), - ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 0:8 >>), %% ping - {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong - ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>), %% close - {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), - {error, closed} = gen_tcp:recv(Socket, 0, 6000), - ok. 
- -ws8_single_bytes(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, [ - "GET /websocket HTTP/1.1\r\n" - "Host: localhost\r\n" - "Connection: Upgrade\r\n" - "Upgrade: websocket\r\n" - "Sec-WebSocket-Origin: http://localhost\r\n" - "Sec-WebSocket-Version: 8\r\n" - "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" - "\r\n"]), - {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), - {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} - = erlang:decode_packet(http, Handshake, []), - [Headers, <<>>] = websocket_headers( - erlang:decode_packet(httph, Rest, []), []), - {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), - {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), - {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} - = lists:keyfind("sec-websocket-accept", 1, Headers), - ok = gen_tcp:send(Socket, << 16#81 >>), %% send one byte - ok = timer:sleep(100), %% sleep for a period - ok = gen_tcp:send(Socket, << 16#85 >>), %% send another and so on - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#37 >>), - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#fa >>), - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#21 >>), - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#3d >>), - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#7f >>), - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#9f >>), - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#4d >>), - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#51 >>), - ok = timer:sleep(100), - ok = gen_tcp:send(Socket, << 16#58 >>), - {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 
0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 0, 6000), - ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 0:8 >>), %% ping - {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong - ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>), %% close - {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), - {error, closed} = gen_tcp:recv(Socket, 0, 6000), - ok. - -ws_timeout_hibernate(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, [ - "GET /ws_timeout_hibernate HTTP/1.1\r\n" - "Host: localhost\r\n" - "Connection: Upgrade\r\n" - "Upgrade: websocket\r\n" - "Sec-WebSocket-Origin: http://localhost\r\n" - "Sec-WebSocket-Version: 8\r\n" - "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" - "\r\n"]), - {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), - {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} - = erlang:decode_packet(http, Handshake, []), - [Headers, <<>>] = websocket_headers( - erlang:decode_packet(httph, Rest, []), []), - {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), - {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), - {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} - = lists:keyfind("sec-websocket-accept", 1, Headers), - {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), - {error, closed} = gen_tcp:recv(Socket, 0, 6000), - ok. 
- -ws8_init_shutdown(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, [ - "GET /ws_init_shutdown HTTP/1.1\r\n" - "Host: localhost\r\n" - "Connection: Upgrade\r\n" - "Upgrade: websocket\r\n" - "Sec-WebSocket-Origin: http://localhost\r\n" - "Sec-WebSocket-Version: 8\r\n" - "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" - "\r\n"]), - {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), - {ok, {http_response, {1, 1}, 403, "Forbidden"}, _Rest} - = erlang:decode_packet(http, Handshake, []), - {error, closed} = gen_tcp:recv(Socket, 0, 6000), - ok. - -ws13(Config) -> - {port, Port} = lists:keyfind(port, 1, Config), - {ok, Socket} = gen_tcp:connect("localhost", Port, - [binary, {active, false}, {packet, raw}]), - ok = gen_tcp:send(Socket, [ - "GET /websocket HTTP/1.1\r\n" - "Host: localhost\r\n" - "Connection: Upgrade\r\n" - "Origin: http://localhost\r\n" - "Sec-WebSocket-Version: 13\r\n" - "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" - "Upgrade: websocket\r\n" - "\r\n"]), - {ok, Handshake} = gen_tcp:recv(Socket, 0, 6000), - {ok, {http_response, {1, 1}, 101, "Switching Protocols"}, Rest} - = erlang:decode_packet(http, Handshake, []), - [Headers, <<>>] = websocket_headers( - erlang:decode_packet(httph, Rest, []), []), - {'Connection', "Upgrade"} = lists:keyfind('Connection', 1, Headers), - {'Upgrade', "websocket"} = lists:keyfind('Upgrade', 1, Headers), - {"sec-websocket-accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="} - = lists:keyfind("sec-websocket-accept", 1, Headers), - %% text - ok = gen_tcp:send(Socket, << 16#81, 16#85, 16#37, 16#fa, 16#21, 16#3d, - 16#7f, 16#9f, 16#4d, 16#51, 16#58 >>), - {ok, << 1:1, 0:3, 1:4, 0:1, 5:7, "Hello" >>} - = gen_tcp:recv(Socket, 0, 6000), - %% binary (empty) - ok = gen_tcp:send(Socket, << 1:1, 0:3, 2:4, 0:8 >>), - {ok, << 1:1, 0:3, 2:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), - %% binary - ok = 
gen_tcp:send(Socket, << 16#82, 16#85, 16#37, 16#fa, 16#21, 16#3d, - 16#7f, 16#9f, 16#4d, 16#51, 16#58 >>), - {ok, << 1:1, 0:3, 2:4, 0:1, 5:7, "Hello" >>} - = gen_tcp:recv(Socket, 0, 6000), - %% Receives. - {ok, << 1:1, 0:3, 1:4, 0:1, 14:7, "websocket_init" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 0, 6000), - {ok, << 1:1, 0:3, 1:4, 0:1, 16:7, "websocket_handle" >>} - = gen_tcp:recv(Socket, 0, 6000), - ok = gen_tcp:send(Socket, << 1:1, 0:3, 9:4, 0:8 >>), %% ping - {ok, << 1:1, 0:3, 10:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), %% pong - ok = gen_tcp:send(Socket, << 1:1, 0:3, 8:4, 0:8 >>), %% close - {ok, << 1:1, 0:3, 8:4, 0:8 >>} = gen_tcp:recv(Socket, 0, 6000), - {error, closed} = gen_tcp:recv(Socket, 0, 6000), - ok. - -websocket_headers({ok, http_eoh, Rest}, Acc) -> - [Acc, Rest]; -websocket_headers({ok, {http_header, _I, Key, _R, Value}, Rest}, Acc) -> - F = fun(S) when is_atom(S) -> S; (S) -> string:to_lower(S) end, - websocket_headers(erlang:decode_packet(httph, Rest, []), - [{F(Key), Value}|Acc]). 
diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/hash.mk b/rabbitmq-server/plugins-src/cowboy-wrapper/hash.mk deleted file mode 100644 index 5071907..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/hash.mk +++ /dev/null @@ -1 +0,0 @@ -UPSTREAM_SHORT_HASH:=4b93c2d diff --git a/rabbitmq-server/plugins-src/cowboy-wrapper/package.mk b/rabbitmq-server/plugins-src/cowboy-wrapper/package.mk deleted file mode 100644 index fd29da9..0000000 --- a/rabbitmq-server/plugins-src/cowboy-wrapper/package.mk +++ /dev/null @@ -1,24 +0,0 @@ -APP_NAME:=cowboy - -UPSTREAM_GIT:=https://github.com/rabbitmq/cowboy.git -UPSTREAM_REVISION:=4b93c2d19a10e5d9cee -RETAIN_ORIGINAL_VERSION:=true -WRAPPER_PATCHES:=\ - 0001-R12-fake-iodata-type.patch \ - 0002-R12-drop-all-references-to-boolean-type.patch \ - 0003-R12-drop-all-references-to-reference-type.patch \ - 0004-R12-drop-references-to-iodata-type.patch \ - 0005-R12-drop-references-to-Default-any-type.patch \ - 0006-Use-erlang-integer_to_list-and-lists-max-instead-of-.patch \ - 0007-R12-type-definitions-must-be-ordered.patch \ - 0008-sec-websocket-protocol.patch - -# Path include/http.hrl is needed during compilation -INCLUDE_DIRS+=$(CLONE_DIR) - -ORIGINAL_APP_FILE:=$(CLONE_DIR)/src/$(APP_NAME).app.src -DO_NOT_GENERATE_APP_FILE=true - -define construct_app_commands - cp $(CLONE_DIR)/LICENSE $(APP_DIR)/LICENSE-ISC-Cowboy -endef diff --git a/rabbitmq-server/plugins-src/do-package.mk b/rabbitmq-server/plugins-src/do-package.mk deleted file mode 100644 index b76c9a5..0000000 --- a/rabbitmq-server/plugins-src/do-package.mk +++ /dev/null @@ -1,578 +0,0 @@ -# This file produces the makefile fragment associated with a package. -# It includes the package's package.mk, interprets all of the -# variables that package.mk might have set, and then visits any -# dependencies of the package that have not already been visited. -# -# PACKAGE_DIR should be set to the canonical path of the package. 
- -# Mark that this package has been visited, so we can avoid doing it again -DONE_$(PACKAGE_DIR):=true - -# Declare the standard per-package targets -.PHONY: $(PACKAGE_DIR)+dist $(PACKAGE_DIR)+clean $(PACKAGE_DIR)+clean-recursive - -$(PACKAGE_DIR)+dist:: $(PACKAGE_DIR)/dist/.done - -$(PACKAGE_DIR)+srcdist:: $(PACKAGE_DIR)/srcdist/.done - -$(PACKAGE_DIR)+clean:: - -$(PACKAGE_DIR)+clean-with-deps:: $(PACKAGE_DIR)+clean - -# Hook into the "all package" targets used by the main public-umbrella -# makefile -all-packages:: $(PACKAGE_DIR)/dist/.done -clean-all-packages:: $(PACKAGE_DIR)+clean - -ifndef NON_INTEGRATED_$(PACKAGE_DIR) - -PACKAGE_NAME=$(notdir $(abspath $(PACKAGE_DIR))) - -# Set all the per-package vars to their default values - -# The packages upon which this package depends -DEPS:= - -# The name of the erlang application produced by the package -APP_NAME=$(call package_to_app_name,$(PACKAGE_NAME)) - -# The location of the .app file which is used as the basis for the -# .app file which goes into the .ez -ORIGINAL_APP_FILE=$(EBIN_DIR)/$(APP_NAME).app - -# The location of the source for that file (before the modules list is -# generated). Ignored if DO_NOT_GENERATE_APP_FILE is set. -ORIGINAL_APP_SOURCE=$(PACKAGE_DIR)/src/$(APP_NAME).app.src - -# Set to prevent generation of the app file. -DO_NOT_GENERATE_APP_FILE:= - -# Should the .ez files for this package, its dependencies, and its -# source distribution be included in RabbitMQ releases, and should we test -# this plugin when invoking "make test" in the umbrella? -RELEASABLE:= - -# The options to pass to erlc when compiling .erl files in this -# package -PACKAGE_ERLC_OPTS=$(ERLC_OPTS) - -# The directories containing Erlang source files -SOURCE_DIRS:=$(PACKAGE_DIR)/src - -# The Erlang source files to compile and include in the package .ez file -SOURCE_ERLS=$(strip $(foreach D,$(SOURCE_DIRS),$(wildcard $(D)/*.erl))) - -# The directories containing Erlang *.hrl files to include in the -# package .ez file. 
-INCLUDE_DIRS:=$(PACKAGE_DIR)/include - -# The Erlang .hrl files to include in the package .ez file. -INCLUDE_HRLS=$(strip $(foreach D,$(INCLUDE_DIRS),$(wildcard $(D)/*.hrl))) - -# The location of the directory containing the .app file. This is -# also where the .beam files produced by compiling SOURCE_ERLS will -# go. -EBIN_DIR:=$(PACKAGE_DIR)/ebin - -# The .beam files for the application. -EBIN_BEAMS=$(patsubst %,$(EBIN_DIR)/%.beam,$(notdir $(basename $(SOURCE_ERLS)))) - -# Erlang expressions which will be invoked during testing (not in the -# broker). -STANDALONE_TEST_COMMANDS:= - -# Erlang expressions which will be invoked within the broker during -# testing. -WITH_BROKER_TEST_COMMANDS:= - -# Config file to give to the test broker. -WITH_BROKER_TEST_CONFIG:= - -# Test scripts which should be invokedduring testing -STANDALONE_TEST_SCRIPTS:= - -# Test scripts which should be invoked alongside a running broker -# during testing -WITH_BROKER_TEST_SCRIPTS:= - -# Test scripts which should be invoked to configure the broker before testing -WITH_BROKER_SETUP_SCRIPTS:= - -# When cleaning, should we also remove the cloned directory for -# wrappers? -PRESERVE_CLONE_DIR?= - -# The directory within the package that contains tests -TEST_DIR=$(PACKAGE_DIR)/test - -# The directories containing .erl files for tests -TEST_SOURCE_DIRS=$(TEST_DIR)/src - -# The .erl files for tests -TEST_SOURCE_ERLS=$(strip $(foreach D,$(TEST_SOURCE_DIRS),$(wildcard $(D)/*.erl))) - -# Where to put .beam files produced by compiling TEST_SOURCE_ERLS -TEST_EBIN_DIR=$(TEST_DIR)/ebin - -# The .beam files produced by compiling TEST_SOURCE_ERLS -TEST_EBIN_BEAMS=$(patsubst %,$(TEST_EBIN_DIR)/%.beam,$(notdir $(basename $(TEST_SOURCE_ERLS)))) - -# Wrapper package variables - -# The git URL to clone from. Setting this variable marks the package -# as a wrapper package. -UPSTREAM_GIT:= - -# The Mercurial URL to clone from. Setting this variable marks the -# package as a wrapper package. 
-UPSTREAM_HG:= - -UPSTREAM_TYPE=$(if $(UPSTREAM_GIT),git)$(if $(UPSTREAM_HG),hg) - -# The upstream revision to clone. Leave empty for default or master -UPSTREAM_REVISION:= - -# Where to clone the upstream repository to -CLONE_DIR=$(PACKAGE_DIR)/$(patsubst %-wrapper,%,$(PACKAGE_NAME))-$(UPSTREAM_TYPE) - -# The source directories contained in the cloned repositories. These -# are appended to SOURCE_DIRS. -UPSTREAM_SOURCE_DIRS=$(CLONE_DIR)/src - -# The include directories contained in the cloned repositories. These -# are appended to INCLUDE_DIRS. -UPSTREAM_INCLUDE_DIRS=$(CLONE_DIR)/include - -# Patches to apply to the upstream codebase after cloning, if any -WRAPPER_PATCHES:= - -# The version number to assign to the build artifacts -PACKAGE_VERSION=$(VERSION) - -# Should the app version incorporate the version from the original -# .app file? -RETAIN_ORIGINAL_VERSION:= - -# The original version that should be incorporated into the package -# version if RETAIN_ORIGINAL_VERSION is set. If empty, the original -# version will be extracted from ORIGINAL_APP_FILE. -ORIGINAL_VERSION:= - -# For customising construction of the build application directory. -CONSTRUCT_APP_PREREQS:= -construct_app_commands= - -package_rules= - -# Now let the package makefile fragment do its stuff -include $(PACKAGE_DIR)/package.mk - -# package_rules provides a convenient way to force prompt expansion -# of variables, including expansion in commands that would otherwise -# be deferred. -# -# If package_rules is defined by the package makefile, we expand it -# and eval it. The point here is to get around the fact that make -# defers expansion of commands. But if we use package variables in -# targets, as we naturally want to do, deferred expansion doesn't -# work: They might have been trampled on by a later package. Because -# we expand package_rules here, references to package varialbes will -# get expanded with the values we expect. 
-# -# The downside is that any variable references for which expansion -# really should be deferred need to be protected by doulbing up the -# dollar. E.g., inside package_rules, you should write $$@, not $@. -# -# We use the same trick again below. -ifdef package_rules -$(eval $(package_rules)) -endif - -# Some variables used for brevity below. Packages can't set these. -APP_FILE=$(PACKAGE_DIR)/build/$(APP_NAME).app.$(PACKAGE_VERSION) -APP_DONE=$(PACKAGE_DIR)/build/app/.done.$(PACKAGE_VERSION) -APP_DIR=$(PACKAGE_DIR)/build/app/$(APP_NAME)-$(PACKAGE_VERSION) -EZ_FILE=$(PACKAGE_DIR)/dist/$(APP_NAME)-$(PACKAGE_VERSION).ez -DEPS_FILE=$(PACKAGE_DIR)/build/deps.mk - - -# Convert the DEPS package names to canonical paths -DEP_PATHS:=$(foreach DEP,$(DEPS),$(call package_to_path,$(DEP))) - -# Handle RETAIN_ORIGINAL_VERSION / ORIGINAL_VERSION -ifdef RETAIN_ORIGINAL_VERSION - -# Automatically acquire ORIGINAL_VERSION from ORIGINAL_APP_FILE -ifndef ORIGINAL_VERSION - -# The generated ORIGINAL_VERSION setting goes in build/version.mk -$(eval $(call safe_include,$(PACKAGE_DIR)/build/version.mk)) - -$(PACKAGE_DIR)/build/version.mk: $(ORIGINAL_APP_FILE) - sed -n -e 's|^.*{vsn, *"\([^"]*\)".*$$|ORIGINAL_VERSION:=\1|p' <$< >$@ - -$(APP_FILE): $(PACKAGE_DIR)/build/version.mk - -endif # ifndef ORIGINAL_VERSION - -PACKAGE_VERSION:=$(ORIGINAL_VERSION)-rmq$(VERSION) - -endif # ifdef RETAIN_ORIGINAL_VERSION - -# Handle wrapper packages -ifneq ($(UPSTREAM_TYPE),) - -SOURCE_DIRS+=$(UPSTREAM_SOURCE_DIRS) -INCLUDE_DIRS+=$(UPSTREAM_INCLUDE_DIRS) - -define package_rules - -# We use --no-backup-if-mismatch to prevent .orig files ending up in -# source builds and causing warnings on Debian if the patches have -# fuzz. 
-ifdef UPSTREAM_GIT -$(CLONE_DIR)/.done: - rm -rf $(CLONE_DIR) - git clone $(UPSTREAM_GIT) $(CLONE_DIR) - # Work around weird github breakage (bug 25264) - cd $(CLONE_DIR) && git pull - $(if $(UPSTREAM_REVISION),cd $(CLONE_DIR) && git checkout $(UPSTREAM_REVISION)) - $(if $(WRAPPER_PATCHES),$(foreach F,$(WRAPPER_PATCHES),patch -E -z .umbrella-orig -d $(CLONE_DIR) -p1 <$(PACKAGE_DIR)/$(F) &&) :) - find $(CLONE_DIR) -name "*.umbrella-orig" -delete - touch $$@ -endif # UPSTREAM_GIT - -ifdef UPSTREAM_HG -$(CLONE_DIR)/.done: - rm -rf $(CLONE_DIR) - hg clone -r $(or $(UPSTREAM_REVISION),default) $(UPSTREAM_HG) $(CLONE_DIR) - $(if $(WRAPPER_PATCHES),$(foreach F,$(WRAPPER_PATCHES),patch -E -z .umbrella-orig -d $(CLONE_DIR) -p1 <$(PACKAGE_DIR)/$(F) &&) :) - find $(CLONE_DIR) -name "*.umbrella-orig" -delete - touch $$@ -endif # UPSTREAM_HG - -# When we clone, we need to remake anything derived from the app file -# (e.g. build/version.mk). -$(ORIGINAL_APP_FILE): $(CLONE_DIR)/.done - -# We include the commit hash into the package version, via hash.mk -# (not in build/ because we want it to survive -# make PRESERVE_CLONE_DIR=true clean -# for obvious reasons) -$(eval $(call safe_include,$(PACKAGE_DIR)/hash.mk)) - -$(PACKAGE_DIR)/hash.mk: $(CLONE_DIR)/.done - @mkdir -p $$(@D) -ifdef UPSTREAM_GIT - echo UPSTREAM_SHORT_HASH:=`git --git-dir=$(CLONE_DIR)/.git log -n 1 HEAD | grep commit | cut -b 8-14` >$$@ -endif -ifdef UPSTREAM_HG - echo UPSTREAM_SHORT_HASH:=`hg id -R $(CLONE_DIR) -i | cut -c -7` >$$@ -endif - -$(APP_FILE): $(PACKAGE_DIR)/hash.mk - -PACKAGE_VERSION:=$(PACKAGE_VERSION)-$(UPSTREAM_TYPE)$(UPSTREAM_SHORT_HASH) - -$(PACKAGE_DIR)+clean:: - [ "x" != "x$(PRESERVE_CLONE_DIR)" ] || rm -rf $(CLONE_DIR) hash.mk -endef # package_rules -$(eval $(package_rules)) - -endif # UPSTREAM_TYPE - -# Generate a rule to compile .erl files from the directory $(1) into -# directory $(2), taking extra erlc options from $(3) -define package_source_dir_targets -$(2)/%.beam: $(1)/%.erl 
$(PACKAGE_DIR)/build/dep-apps/.done | $(DEPS_FILE) - @mkdir -p $$(@D) - ERL_LIBS=$(PACKAGE_DIR)/build/dep-apps $(ERLC) $(PACKAGE_ERLC_OPTS) $(foreach D,$(INCLUDE_DIRS),-I $(D)) -pa $$(@D) -o $$(@D) $(3) $$< - -endef - -$(eval $(foreach D,$(SOURCE_DIRS),$(call package_source_dir_targets,$(D),$(EBIN_DIR),))) -$(eval $(foreach D,$(TEST_SOURCE_DIRS),$(call package_source_dir_targets,$(D),$(TEST_EBIN_DIR),-pa $(EBIN_DIR)))) - -# Commands to run the broker for tests -# -# $(1): The value for RABBITMQ_SERVER_START_ARGS -# $(2): Extra env var settings when invoking the rabbitmq-server script -# $(3): Extra .ezs to copy into the plugins dir -define run_broker - rm -rf $(TEST_TMPDIR) - mkdir -p $(foreach D,log plugins $(NODENAME),$(TEST_TMPDIR)/$(D)) - cp -p $(PACKAGE_DIR)/dist/*.ez $(TEST_TMPDIR)/plugins - $(call copy,$(3),$(TEST_TMPDIR)/plugins) - rm -f $(TEST_TMPDIR)/plugins/rabbit_common*.ez - RABBITMQ_PLUGINS_DIR=$(TEST_TMPDIR)/plugins \ - RABBITMQ_ENABLED_PLUGINS_FILE=$(TEST_TMPDIR)/enabled_plugins \ - $(UMBRELLA_BASE_DIR)/rabbitmq-server/scripts/rabbitmq-plugins \ - set --offline $$$$(RABBITMQ_PLUGINS_DIR=$(TEST_TMPDIR)/plugins \ - RABBITMQ_ENABLED_PLUGINS_FILE=$(TEST_TMPDIR)/enabled_plugins \ - $(UMBRELLA_BASE_DIR)/rabbitmq-server/scripts/rabbitmq-plugins list -m | tr '\n' ' ') - MAKE="$(MAKE)" \ - RABBITMQ_PLUGINS_DIR=$(TEST_TMPDIR)/plugins \ - RABBITMQ_ENABLED_PLUGINS_FILE=$(TEST_TMPDIR)/enabled_plugins \ - RABBITMQ_LOG_BASE=$(TEST_TMPDIR)/log \ - RABBITMQ_MNESIA_BASE=$(TEST_TMPDIR)/$(NODENAME) \ - RABBITMQ_PID_FILE=$(TEST_TMPDIR)/$(NODENAME).pid \ - RABBITMQ_NODENAME=$(NODENAME) \ - RABBITMQ_SERVER_START_ARGS=$(1) \ - $(2) $(UMBRELLA_BASE_DIR)/rabbitmq-server/scripts/rabbitmq-server -endef - -# Commands to run the package's test suite -# -# $(1): Extra .ezs to copy into the plugins dir -define run_with_broker_tests -$(if $(WITH_BROKER_TEST_COMMANDS)$(WITH_BROKER_TEST_SCRIPTS),$(call run_with_broker_tests_aux,$1)) -endef - -define run_with_broker_tests_aux - $(call 
run_broker,'-pa $(TEST_EBIN_DIR) -coverage directories ["$(EBIN_DIR)"$(COMMA)"$(TEST_EBIN_DIR)"]',RABBITMQ_CONFIG_FILE=$(WITH_BROKER_TEST_CONFIG),$(1)) & - $(UMBRELLA_BASE_DIR)/rabbitmq-server/scripts/rabbitmqctl -n $(NODENAME) wait $(TEST_TMPDIR)/$(NODENAME).pid - echo > $(TEST_TMPDIR)/rabbit-test-output && \ - if $(foreach SCRIPT,$(WITH_BROKER_SETUP_SCRIPTS),$(SCRIPT) &&) \ - $(foreach CMD,$(WITH_BROKER_TEST_COMMANDS), \ - echo >> $(TEST_TMPDIR)/rabbit-test-output && \ - echo "$(CMD)." \ - | tee -a $(TEST_TMPDIR)/rabbit-test-output \ - | $(ERL_CALL) $(ERL_CALL_OPTS) \ - | tee -a $(TEST_TMPDIR)/rabbit-test-output \ - | egrep "{ok, (ok|passed)}" >/dev/null &&) \ - MAKE="$(MAKE)" RABBITMQ_NODENAME="$(NODENAME)" \ - $(foreach SCRIPT,$(WITH_BROKER_TEST_SCRIPTS),$(SCRIPT) &&) : ; \ - then \ - touch $(TEST_TMPDIR)/.passed ; \ - printf "\nPASSED\n" ; \ - else \ - cat $(TEST_TMPDIR)/rabbit-test-output ; \ - printf "\n\nFAILED\n" ; \ - fi - sleep 1 - echo "rabbit_misc:report_cover(), init:stop()." | $(ERL_CALL) $(ERL_CALL_OPTS) - sleep 1 - test -f $(TEST_TMPDIR)/.passed -endef - -# The targets common to all integrated packages -define package_rules - -# Put all relevant ezs into the dist dir for this package, including -# the main ez file produced by this package -# -# When the package version changes, our .ez filename will change, and -# we need to regenerate the dist directory. So the dependency needs -# to go via a stamp file that incorporates the version in its name. -# But we need a target with a fixed name for other packages to depend -# on. And it can't be a phony, as a phony will always get rebuilt. -# Hence the need for two stamp files here. 
-$(PACKAGE_DIR)/dist/.done: $(PACKAGE_DIR)/dist/.done.$(PACKAGE_VERSION) - touch $$@ - -$(PACKAGE_DIR)/dist/.done.$(PACKAGE_VERSION): $(PACKAGE_DIR)/build/dep-ezs/.done $(APP_DONE) - rm -rf $$(@D) - mkdir -p $$(@D) - cd $(dir $(APP_DIR)) && zip -q -r $$(abspath $(EZ_FILE)) $(notdir $(APP_DIR)) - $$(call copy,$$(wildcard $$($$@ - -ifndef DO_NOT_GENERATE_APP_FILE - -# Generate the .app file. Note that this is a separate step from above -# so that the plugin still works correctly when symlinked as a directory -$(ORIGINAL_APP_FILE): $(ORIGINAL_APP_SOURCE) $(SOURCE_ERLS) $(UMBRELLA_BASE_DIR)/generate_app - @mkdir -p $$(@D) - escript $(UMBRELLA_BASE_DIR)/generate_app $$< $$@ $(SOURCE_DIRS) - -$(PACKAGE_DIR)+clean:: - rm -f $(ORIGINAL_APP_FILE) - -endif - -# Unpack the ezs from dependency packages, so that their contents are -# accessible to erlc -$(PACKAGE_DIR)/build/dep-apps/.done: $(PACKAGE_DIR)/build/dep-ezs/.done - rm -rf $$(@D) - mkdir -p $$(@D) - @echo [elided] unzip ezs - @cd $$(@D) && $$(foreach EZ,$$(wildcard $(PACKAGE_DIR)/build/dep-ezs/*.ez),unzip -q $$(abspath $$(EZ)) &&) : - touch $$@ - -# Dependency autogeneration. This is complicated slightly by the need -# to generate a dependency file which is path-independent. 
-$(DEPS_FILE): $(SOURCE_ERLS) $(INCLUDE_HRLS) $(TEST_SOURCE_ERLS) - @mkdir -p $$(@D) - @echo [elided] generate deps - @$$(if $$^,echo $$(subst : ,:,$$(foreach F,$$^,$$(abspath $$(F)):)) | escript $(abspath $(UMBRELLA_BASE_DIR)/generate_deps) $$@ '$$$$(EBIN_DIR)',echo >$$@) - @echo [elided] fix test deps - @$$(foreach F,$(TEST_EBIN_BEAMS),sed -e 's|^$$$$(EBIN_DIR)/$$(notdir $$(F)):|$$$$(TEST_EBIN_DIR)/$$(notdir $$(F)):|' $$@ > $$@.tmp && mv $$@.tmp $$@ && ) : - sed -e 's|$$@|$$$$(DEPS_FILE)|' $$@ > $$@.tmp && mv $$@.tmp $$@ - -$(eval $(call safe_include,$(DEPS_FILE))) - -$(PACKAGE_DIR)/srcdist/.done: $(PACKAGE_DIR)/srcdist/.done.$(PACKAGE_VERSION) - touch $$@ - -$(PACKAGE_DIR)/srcdist/.done.$(PACKAGE_VERSION): - mkdir -p $(PACKAGE_DIR)/build/srcdist/ - rsync -a --exclude '.hg*' --exclude '.git*' --exclude 'build' $(PACKAGE_DIR) $(PACKAGE_DIR)/build/srcdist/$(APP_NAME)-$(PACKAGE_VERSION) - mkdir -p $(PACKAGE_DIR)/srcdist/ - tar cjf $(PACKAGE_DIR)/srcdist/$(APP_NAME)-$(PACKAGE_VERSION)-src.tar.bz2 -C $(PACKAGE_DIR)/build/srcdist/ $(APP_NAME)-$(PACKAGE_VERSION) - touch $$@ - -$(PACKAGE_DIR)+clean:: - rm -rf $(EBIN_DIR)/*.beam $(TEST_EBIN_DIR)/*.beam $(PACKAGE_DIR)/dist $(PACKAGE_DIR)/srcdist $(PACKAGE_DIR)/build $(PACKAGE_DIR)/erl_crash.dump - -$(PACKAGE_DIR)+clean-with-deps:: $(foreach P,$(DEP_PATHS),$(P)+clean-with-deps) - -ifdef RELEASABLE -all-releasable:: $(PACKAGE_DIR)/dist/.done - -copy-releasable:: $(PACKAGE_DIR)/dist/.done - cp $(PACKAGE_DIR)/dist/*.ez $(PLUGINS_DIST_DIR) - -copy-srcdist:: $(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done - -endif - -$(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done:: $(ORIGINAL_APP_FILE) $(foreach P,$(DEP_PATHS),$(PLUGINS_SRC_DIST_DIR)/$(P)/.srcdist_done) - rsync -a --exclude '.hg*' --exclude '.git*' $(PACKAGE_DIR) $(PLUGINS_SRC_DIST_DIR)/ - [ -f $(PACKAGE_DIR)/license_info ] && cp $(PACKAGE_DIR)/license_info $(PLUGINS_SRC_DIST_DIR)/licensing/license_info_$(PACKAGE_NAME) || true - find $(PACKAGE_DIR) -maxdepth 1 -name 
'LICENSE-*' -exec cp '{}' $(PLUGINS_SRC_DIST_DIR)/licensing/ \; - touch $(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done - -# A hook to allow packages to verify that prerequisites are satisfied -# before running. -.PHONY: $(PACKAGE_DIR)+pre-run -$(PACKAGE_DIR)+pre-run:: - -# Run erlang with the package, its tests, and all its dependencies -# available. -.PHONY: $(PACKAGE_DIR)+run -$(PACKAGE_DIR)+run: $(PACKAGE_DIR)/dist/.done $(TEST_EBIN_BEAMS) $(PACKAGE_DIR)+pre-run - ERL_LIBS=$(PACKAGE_DIR)/dist $(ERL) $(ERL_OPTS) -pa $(TEST_EBIN_DIR) - -# Run the broker with the package, its tests, and all its dependencies -# available. -.PHONY: $(PACKAGE_DIR)+run-in-broker -$(PACKAGE_DIR)+run-in-broker: $(PACKAGE_DIR)/dist/.done $(RABBITMQ_SERVER_PATH)/dist/.done $(TEST_EBIN_BEAMS) - $(call run_broker,'-pa $(TEST_EBIN_DIR)',RABBITMQ_ALLOW_INPUT=true) - -# A hook to allow packages to verify that prerequisites are satisfied -# before running tests. -.PHONY: $(PACKAGE_DIR)+pre-test -$(PACKAGE_DIR)+pre-test:: - -# Runs the package's tests that operate within (or in conjuction with) -# a running broker. -.PHONY: $(PACKAGE_DIR)+in-broker-test -$(PACKAGE_DIR)+in-broker-test: $(PACKAGE_DIR)/dist/.done $(RABBITMQ_SERVER_PATH)/dist/.done $(TEST_EBIN_BEAMS) $(PACKAGE_DIR)+pre-test $(PACKAGE_DIR)+standalone-test $(if $(RELEASABLE),$(call chain_test,$(PACKAGE_DIR)+in-broker-test)) - $(call run_with_broker_tests) - -# Running the coverage tests requires Erlang/OTP R14. Note that -# coverage only covers the in-broker tests. 
-.PHONY: $(PACKAGE_DIR)+coverage -$(PACKAGE_DIR)+coverage: $(PACKAGE_DIR)/dist/.done $(COVERAGE_PATH)/dist/.done $(TEST_EBIN_BEAMS) $(PACKAGE_DIR)+pre-test - $(call run_with_broker_tests,$(COVERAGE_PATH)/dist/*.ez) - -# Runs the package's tests that don't need a running broker -.PHONY: $(PACKAGE_DIR)+standalone-test -$(PACKAGE_DIR)+standalone-test: $(PACKAGE_DIR)/dist/.done $(TEST_EBIN_BEAMS) $(PACKAGE_DIR)+pre-test $(if $(RELEASABLE),$(call chain_test,$(PACKAGE_DIR)+standalone-test)) - $$(if $(STANDALONE_TEST_COMMANDS),\ - $$(foreach CMD,$(STANDALONE_TEST_COMMANDS),\ - ERL_LIBS=$(PACKAGE_DIR)/dist $(ERL) -noinput $(ERL_OPTS) -pa $(TEST_EBIN_DIR) -sname standalone_test -eval "init:stop(case $$(CMD) of ok -> 0; passed -> 0; _Else -> 1 end)" &&\ - )\ - :) - $$(if $(STANDALONE_TEST_SCRIPTS),$$(foreach SCRIPT,$(STANDALONE_TEST_SCRIPTS),$$(SCRIPT) &&) :) - -# Run all the package's tests -.PHONY: $(PACKAGE_DIR)+test -$(PACKAGE_DIR)+test:: $(PACKAGE_DIR)+standalone-test $(PACKAGE_DIR)+in-broker-test - -.PHONY: $(PACKAGE_DIR)+check-xref -$(PACKAGE_DIR)+check-xref: $(PACKAGE_DIR)/dist/.done - UNPACKDIR=$$$$(mktemp -d $(TMPDIR)/tmp.XXXXXXXXXX) && \ - for ez in $$$$(find $(PACKAGE_DIR)/dist -type f -name "*.ez"); do \ - unzip -q $$$${ez} -d $$$${UNPACKDIR}; \ - done && \ - rm -rf $$$${UNPACKDIR}/rabbit_common-* && \ - ln -sf $$$$(pwd)/$(RABBITMQ_SERVER_PATH)/ebin $$$${UNPACKDIR} && \ - OK=true && \ - { $(UMBRELLA_BASE_DIR)/check_xref $(PACKAGE_DIR) $$$${UNPACKDIR} || OK=false; } && \ - rm -rf $$$${UNPACKDIR} && \ - $$$${OK} - -check-xref-packages:: $(PACKAGE_DIR)+check-xref - -endef -$(eval $(package_rules)) - -# Recursing into dependency packages has to be the last thing we do -# because it will trample all over the per-package variables. 
- -# Recurse into dependency packages -$(foreach DEP_PATH,$(DEP_PATHS),$(eval $(call do_package,$(DEP_PATH)))) - -else # NON_INTEGRATED_$(PACKAGE_DIR) - -define package_rules - -# When the package version changes, our .ez filename will change, and -# we need to regenerate the dist directory. So the dependency needs -# to go via a stamp file that incorporates the version in its name. -# But we need a target with a fixed name for other packages to depend -# on. And it can't be a phony, as a phony will always get rebuilt. -# Hence the need for two stamp files here. -$(PACKAGE_DIR)/dist/.done: $(PACKAGE_DIR)/dist/.done.$(VERSION) - touch $$@ - -# Non-integrated packages (rabbitmq-server and rabbitmq-erlang-client) -# present a dilemma. We could re-make the package every time we need -# it. But that will cause a huge amount of unnecessary rebuilding. -# Or we could not worry about rebuilding non-integrated packages. -# That's good for those developing plugins, but not for those who want -# to work on the broker and erlang client in the context of the -# plugins. So instead, we use a conservative approximation to the -# dependency structure within the package, to tell when to re-run the -# makefile. -$(PACKAGE_DIR)/dist/.done.$(VERSION): $(PACKAGE_DIR)/Makefile $(wildcard $(PACKAGE_DIR)/*.mk) $(wildcard $(PACKAGE_DIR)/src/*.erl) $(wildcard $(PACKAGE_DIR)/include/*.hrl) $(wildcard $(PACKAGE_DIR)/*.py) $(foreach DEP,$(NON_INTEGRATED_DEPS_$(PACKAGE_DIR)),$(call package_to_path,$(DEP))/dist/.done) - rm -rf $$(@D) - $$(MAKE) -C $(PACKAGE_DIR) - mkdir -p $$(@D) - touch $$@ - -# When building plugins-src we want to "make clean", but some -# non-integrated packages will not be there. Don't fall over in that case. 
-$(PACKAGE_DIR)+clean:: - if [ -d $(PACKAGE_DIR) ] ; then $$(MAKE) -C $(PACKAGE_DIR) clean ; fi - rm -rf $(PACKAGE_DIR)/dist - -endef -$(eval $(package_rules)) - -endif # NON_INTEGRATED_$(PACKAGE_DIR) diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/.srcdist_done b/rabbitmq-server/plugins-src/eldap-wrapper/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/Makefile b/rabbitmq-server/plugins-src/eldap-wrapper/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-appify.patch b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-appify.patch deleted file mode 100644 index 90ad3d2..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-appify.patch +++ /dev/null @@ -1,14 +0,0 @@ -diff -Naur eldap.orig/ebin/eldap.app eldap/ebin/eldap.app ---- eldap.orig/ebin/eldap.app 1970-01-01 01:00:00.000000000 +0100 -+++ eldap/ebin/eldap.app 2011-01-20 12:47:04.377399296 +0000 -@@ -0,0 +1,10 @@ -+{application, eldap, -+ [{description, "LDAP Client Library"}, -+ {vsn, "0.01"}, -+ {modules, [ -+ eldap, -+ 'ELDAPv3' -+ ]}, -+ {registered, []}, -+ {applications, [kernel, stdlib]} -+ ]}. 
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/.done b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/.done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/Makefile b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/Makefile deleted file mode 100644 index f5ecba4..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/Makefile +++ /dev/null @@ -1,7 +0,0 @@ - -all: - (cd src;$(MAKE)) - -clean: - (cd src;$(MAKE) clean) - diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/README b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/README deleted file mode 100644 index e1bde9d..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/README +++ /dev/null @@ -1,33 +0,0 @@ -Hi, - -This is 'eldap', the Erlang LDAP library. - -It exports an API that can do all possible operations -you may want to do against an LDAP server. The code has -been tested to work at some point, but only the bind -and search operations are running daily in our products, -so there may be bugs lurking in some parts of the code. - -To just use eldap for doing authentication, do like in: - - {ok,X} = eldap:open(["ldap.mycorp.com"], []). - eldap:simple_bind(X, "uid=tobbe,ou=People,dc=mycorp,dc=com", "passwd"). - -In the doc/README.example you'll find a trace from a -Erlang shell session as an example on how to setup a -connection, authenticate (bind) and perform a search. -Note that by using the option {ssl, true}, you should -be able to setup an SSL tunnel (LDAPS) if your Erlang -system has been configured with SSL. - -In the test directory there are some hints and examples -on how to test the code and how to setup and populate -an OpenLDAP server. The 'eldap' code has been tested -agains OpenLDAP, IPlanet and ActiveDirectory servers. 
- -If you plan to incorporate this code into your system -I suggest that you build a server/supervisor harnesk -that uses 'eldap' (as we have done in our products). - -Good luck ! -/Tobbe diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/README.example b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/README.example deleted file mode 100644 index b96d5ef..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/README.example +++ /dev/null @@ -1,44 +0,0 @@ -1> {_,S} = eldap:open(["192.168.128.47"], []). -{ok,<0.30.0>} -2> eldap:simple_bind(S,"cn=Torbjorn Tornkvist,cn=Users,dc=bluetail,dc=com","qwe123"). -ok -3> Base = {base, "dc=bluetail,dc=com"}. -{base,"dc=bluetail,dc=com"} -4> Scope = {scope, eldap:wholeSubtree()}. -{scope,wholeSubtree} -5> Filter = {filter, eldap:equalityMatch("sAMAccountName", "tobbe")}. -{filter,{equalityMatch,{'AttributeValueAssertion',"sAMAccountName","tobbe"}}} -6> Search = [Base, Scope, Filter]. -[{base,"dc=bluetail,dc=com"}, - {scope,wholeSubtree}, - {filter,{equalityMatch,{'AttributeValueAssertion',"sAMAccountName","tobbe"}}}] -7> eldap:search(S, Search). 
-{ok,{eldap_search_result,[{eldap_entry, - "CN=Torbjorn Tornkvist,CN=Users,DC=bluetail,DC=com", - [{"manager", - ["CN=Tord Larsson,CN=Users,DC=bluetail,DC=com"]}, - {"memberOf", - ["CN=TestGroup2,CN=Users,DC=bluetail,DC=com", - "CN=TestGroup,CN=Users,DC=bluetail,DC=com", - "CN=Pre-Windows 2000 Compatible Access,CN=Builtin,DC=bluetail,DC=com", - "CN=Server Operators,CN=Builtin,DC=bluetail,DC=com"]}, - {"accountExpires",["0"]}, - {"adminCount",["1"]}, - {"badPasswordTime",["127119104851642448"]}, - {"badPwdCount",["0"]}, - {"codePage",["0"]}, - {"cn",["Torbjorn Tornkvist"]}, - {"company",["Alteon Web Systems"]}, - {"countryCode",["0"]}, - {"department",["Bluetail"]}, - {"displayName",["Torbjorn Tornkvist"]}, - {"mail",["tobbe@bluetail.com"]}, - {"givenName",["Torbjorn"]}, - {"instanceType",["4"]}, - {"lastLogoff",["0"]}, - {"lastLogon",["127119109376267104"]}, - {"logonCount",[...]}, - {"msNPAllowDialin"|...}, - {...}|...]}], - [["ldap://bluetail.com/CN=Configuration,DC=bluetail,DC=com"]]}} -8> diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/short-desc b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/short-desc deleted file mode 100644 index e236da3..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/doc/short-desc +++ /dev/null @@ -1 +0,0 @@ -This is 'eldap', the Erlang LDAP library. diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/ebin/eldap.app b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/ebin/eldap.app deleted file mode 100644 index 3c4e87e..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/ebin/eldap.app +++ /dev/null @@ -1,10 +0,0 @@ -{application, eldap, - [{description, "LDAP Client Library"}, - {vsn, "0.01"}, - {modules, [ - eldap, - 'ELDAPv3' - ]}, - {registered, []}, - {applications, [kernel, stdlib]} - ]}. 
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/include/eldap.hrl b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/include/eldap.hrl deleted file mode 100644 index ee5ad2f..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/include/eldap.hrl +++ /dev/null @@ -1,32 +0,0 @@ --ifndef( _ELDAP_HRL ). --define( _ELDAP_HRL , 1 ). - -%%% -%%% Search input parameters -%%% --record(eldap_search, { - base = [], % Baseobject - filter = [], % Search conditions - scope, % Search scope - attributes = [], % Attributes to be returned - types_only = false, % Return types+values or types - timeout = 0 % Timelimit for search - }). - -%%% -%%% Returned search result -%%% --record(eldap_search_result, { - entries = [], % List of #eldap_entry{} records - referrals = [] % List of referrals - }). - -%%% -%%% LDAP entry -%%% --record(eldap_entry, { - object_name = "", % The DN for the entry - attributes = [] % List of {Attribute, Value} pairs - }). - --endif. diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/ELDAPv3.asn b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/ELDAPv3.asn deleted file mode 100644 index 0cfac48..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/ELDAPv3.asn +++ /dev/null @@ -1,291 +0,0 @@ --- LDAPv3 ASN.1 specification, taken from RFC 2251 - --- Lightweight-Directory-Access-Protocol-V3 DEFINITIONS -ELDAPv3 DEFINITIONS -IMPLICIT TAGS ::= - -BEGIN - -LDAPMessage ::= SEQUENCE { - messageID MessageID, - protocolOp CHOICE { - bindRequest BindRequest, - bindResponse BindResponse, - unbindRequest UnbindRequest, - searchRequest SearchRequest, - searchResEntry SearchResultEntry, - searchResDone SearchResultDone, - searchResRef SearchResultReference, - modifyRequest ModifyRequest, - modifyResponse ModifyResponse, - addRequest AddRequest, - addResponse AddResponse, - delRequest DelRequest, - delResponse DelResponse, - modDNRequest ModifyDNRequest, - modDNResponse ModifyDNResponse, - 
compareRequest CompareRequest, - compareResponse CompareResponse, - abandonRequest AbandonRequest, - extendedReq ExtendedRequest, - extendedResp ExtendedResponse }, - controls [0] Controls OPTIONAL } - -MessageID ::= INTEGER (0 .. maxInt) - -maxInt INTEGER ::= 2147483647 -- (2^^31 - 1) -- - -LDAPString ::= OCTET STRING - -LDAPOID ::= OCTET STRING - -LDAPDN ::= LDAPString - -RelativeLDAPDN ::= LDAPString - -AttributeType ::= LDAPString - -AttributeDescription ::= LDAPString - - - - --- Wahl, et. al. Standards Track [Page 44] --- --- RFC 2251 LDAPv3 December 1997 - - -AttributeDescriptionList ::= SEQUENCE OF - AttributeDescription - -AttributeValue ::= OCTET STRING - -AttributeValueAssertion ::= SEQUENCE { - attributeDesc AttributeDescription, - assertionValue AssertionValue } - -AssertionValue ::= OCTET STRING - -Attribute ::= SEQUENCE { - type AttributeDescription, - vals SET OF AttributeValue } - -MatchingRuleId ::= LDAPString - -LDAPResult ::= SEQUENCE { - resultCode ENUMERATED { - success (0), - operationsError (1), - protocolError (2), - timeLimitExceeded (3), - sizeLimitExceeded (4), - compareFalse (5), - compareTrue (6), - authMethodNotSupported (7), - strongAuthRequired (8), - -- 9 reserved -- - referral (10), -- new - adminLimitExceeded (11), -- new - unavailableCriticalExtension (12), -- new - confidentialityRequired (13), -- new - saslBindInProgress (14), -- new - noSuchAttribute (16), - undefinedAttributeType (17), - inappropriateMatching (18), - constraintViolation (19), - attributeOrValueExists (20), - invalidAttributeSyntax (21), - -- 22-31 unused -- - noSuchObject (32), - aliasProblem (33), - invalidDNSyntax (34), - -- 35 reserved for undefined isLeaf -- - aliasDereferencingProblem (36), - -- 37-47 unused -- - inappropriateAuthentication (48), - --- Wahl, et. al. 
Standards Track [Page 45] --- --- RFC 2251 LDAPv3 December 1997 - - - invalidCredentials (49), - insufficientAccessRights (50), - busy (51), - unavailable (52), - unwillingToPerform (53), - loopDetect (54), - -- 55-63 unused -- - namingViolation (64), - objectClassViolation (65), - notAllowedOnNonLeaf (66), - notAllowedOnRDN (67), - entryAlreadyExists (68), - objectClassModsProhibited (69), - -- 70 reserved for CLDAP -- - affectsMultipleDSAs (71), -- new - -- 72-79 unused -- - other (80) }, - -- 81-90 reserved for APIs -- - matchedDN LDAPDN, - errorMessage LDAPString, - referral [3] Referral OPTIONAL } - -Referral ::= SEQUENCE OF LDAPURL - -LDAPURL ::= LDAPString -- limited to characters permitted in URLs - -Controls ::= SEQUENCE OF Control - -Control ::= SEQUENCE { - controlType LDAPOID, - criticality BOOLEAN DEFAULT FALSE, - controlValue OCTET STRING OPTIONAL } - -BindRequest ::= [APPLICATION 0] SEQUENCE { - version INTEGER (1 .. 127), - name LDAPDN, - authentication AuthenticationChoice } - -AuthenticationChoice ::= CHOICE { - simple [0] OCTET STRING, - -- 1 and 2 reserved - sasl [3] SaslCredentials } - -SaslCredentials ::= SEQUENCE { - mechanism LDAPString, - credentials OCTET STRING OPTIONAL } - -BindResponse ::= [APPLICATION 1] SEQUENCE { - --- Wahl, et. al. Standards Track [Page 46] --- --- RFC 2251 LDAPv3 December 1997 - - - COMPONENTS OF LDAPResult, - serverSaslCreds [7] OCTET STRING OPTIONAL } - -UnbindRequest ::= [APPLICATION 2] NULL - -SearchRequest ::= [APPLICATION 3] SEQUENCE { - baseObject LDAPDN, - scope ENUMERATED { - baseObject (0), - singleLevel (1), - wholeSubtree (2) }, - derefAliases ENUMERATED { - neverDerefAliases (0), - derefInSearching (1), - derefFindingBaseObj (2), - derefAlways (3) }, - sizeLimit INTEGER (0 .. maxInt), - timeLimit INTEGER (0 .. 
maxInt), - typesOnly BOOLEAN, - filter Filter, - attributes AttributeDescriptionList } - -Filter ::= CHOICE { - and [0] SET OF Filter, - or [1] SET OF Filter, - not [2] Filter, - equalityMatch [3] AttributeValueAssertion, - substrings [4] SubstringFilter, - greaterOrEqual [5] AttributeValueAssertion, - lessOrEqual [6] AttributeValueAssertion, - present [7] AttributeDescription, - approxMatch [8] AttributeValueAssertion, - extensibleMatch [9] MatchingRuleAssertion } - -SubstringFilter ::= SEQUENCE { - type AttributeDescription, - -- at least one must be present - substrings SEQUENCE OF CHOICE { - initial [0] LDAPString, - any [1] LDAPString, - final [2] LDAPString } } - -MatchingRuleAssertion ::= SEQUENCE { - matchingRule [1] MatchingRuleId OPTIONAL, - type [2] AttributeDescription OPTIONAL, - matchValue [3] AssertionValue, - dnAttributes [4] BOOLEAN DEFAULT FALSE } - --- Wahl, et. al. Standards Track [Page 47] --- --- RFC 2251 LDAPv3 December 1997 - -SearchResultEntry ::= [APPLICATION 4] SEQUENCE { - objectName LDAPDN, - attributes PartialAttributeList } - -PartialAttributeList ::= SEQUENCE OF SEQUENCE { - type AttributeDescription, - vals SET OF AttributeValue } - -SearchResultReference ::= [APPLICATION 19] SEQUENCE OF LDAPURL - -SearchResultDone ::= [APPLICATION 5] LDAPResult - -ModifyRequest ::= [APPLICATION 6] SEQUENCE { - object LDAPDN, - modification SEQUENCE OF SEQUENCE { - operation ENUMERATED { - add (0), - delete (1), - replace (2) }, - modification AttributeTypeAndValues } } - -AttributeTypeAndValues ::= SEQUENCE { - type AttributeDescription, - vals SET OF AttributeValue } - -ModifyResponse ::= [APPLICATION 7] LDAPResult - -AddRequest ::= [APPLICATION 8] SEQUENCE { - entry LDAPDN, - attributes AttributeList } - -AttributeList ::= SEQUENCE OF SEQUENCE { - type AttributeDescription, - vals SET OF AttributeValue } - -AddResponse ::= [APPLICATION 9] LDAPResult - -DelRequest ::= [APPLICATION 10] LDAPDN - -DelResponse ::= [APPLICATION 11] LDAPResult - 
-ModifyDNRequest ::= [APPLICATION 12] SEQUENCE { - entry LDAPDN, - newrdn RelativeLDAPDN, - deleteoldrdn BOOLEAN, - newSuperior [0] LDAPDN OPTIONAL } - -ModifyDNResponse ::= [APPLICATION 13] LDAPResult - --- Wahl, et. al. Standards Track [Page 48] --- --- RFC 2251 LDAPv3 December 1997 - - -CompareRequest ::= [APPLICATION 14] SEQUENCE { - entry LDAPDN, - ava AttributeValueAssertion } - -CompareResponse ::= [APPLICATION 15] LDAPResult - -AbandonRequest ::= [APPLICATION 16] MessageID - -ExtendedRequest ::= [APPLICATION 23] SEQUENCE { - requestName [0] LDAPOID, - requestValue [1] OCTET STRING OPTIONAL } - -ExtendedResponse ::= [APPLICATION 24] SEQUENCE { - COMPONENTS OF LDAPResult, - responseName [10] LDAPOID OPTIONAL, - response [11] OCTET STRING OPTIONAL } - -END - - diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/Makefile b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/Makefile deleted file mode 100644 index dc15604..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/Makefile +++ /dev/null @@ -1,26 +0,0 @@ - -ERLC = erlc -EBIN_DIR = ../ebin -ERLC_FLAGS += -I ./src -I ../include +debug_info -ERL_OBJECTS := ${EBIN_DIR}/eldap.beam ${EBIN_DIR}/ELDAPv3.beam ${EBIN_DIR}/eldap_fsm.beam - -.SUFFIXES: .asn .erl .beam - -$(EBIN_DIR)/%.beam: %.erl - $(ERLC) $(ERLC_FLAGS) -o $(EBIN_DIR) $< - -.PHONY: all -all: asn $(ERL_OBJECTS) - -.PHONY: asn -asn: ELDAPv3.erl ../ebin/ELDAPv3.beam - -ELDAPv3.erl: ELDAPv3.asn - ${ERLC} ELDAPv3.asn - mv ELDAPv3.beam ${EBIN_DIR} - -.PHONY: clean -clean: - -rm $(ERL_OBJECTS) ELDAPv3.erl ELDAPv3.asn1db ELDAPv3.hrl - - diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/eldap.erl b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/eldap.erl deleted file mode 100644 index b8422f2..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/src/eldap.erl +++ /dev/null @@ -1,1078 +0,0 @@ --module(eldap). 
-%%% -------------------------------------------------------------------- -%%% Created: 12 Oct 2000 by Tobbe -%%% Function: Erlang client LDAP implementation according RFC 2251,2253 -%%% and 2255. The interface is based on RFC 1823, and -%%% draft-ietf-asid-ldap-c-api-00.txt -%%% -%%% Copyright (c) 2010 Torbjorn Tornkvist -%%% See MIT-LICENSE at the top dir for licensing information. -%%% -------------------------------------------------------------------- --vc('$Id$ '). --export([open/1,open/2,simple_bind/3,controlling_process/2, - baseObject/0,singleLevel/0,wholeSubtree/0,close/1, - equalityMatch/2,greaterOrEqual/2,lessOrEqual/2, - approxMatch/2,search/2,substrings/2,present/1, - 'and'/1,'or'/1,'not'/1,modify/3, mod_add/2, mod_delete/2, - mod_replace/2, add/3, delete/2, modify_dn/5,parse_dn/1, - parse_ldap_url/1]). - --import(lists,[concat/1]). - --include("ELDAPv3.hrl"). --include("eldap.hrl"). - --define(LDAP_VERSION, 3). --define(LDAP_PORT, 389). --define(LDAPS_PORT, 636). - --record(eldap, {version = ?LDAP_VERSION, - host, % Host running LDAP server - port = ?LDAP_PORT, % The LDAP server port - fd, % Socket filedescriptor. - binddn = "", % Name of the entry to bind as - passwd, % Password for (above) entry - id = 0, % LDAP Request ID - log, % User provided log function - timeout = infinity, % Request timeout - anon_auth = false, % Allow anonymous authentication - use_tls = false % LDAP/LDAPS - }). - -%%% For debug purposes -%%-define(PRINT(S, A), io:fwrite("~w(~w): " ++ S, [?MODULE,?LINE|A])). --define(PRINT(S, A), true). - --define(elog(S, A), error_logger:info_msg("~w(~w): "++S,[?MODULE,?LINE|A])). 
- -%%% ==================================================================== -%%% Exported interface -%%% ==================================================================== - -%%% -------------------------------------------------------------------- -%%% open(Hosts [,Opts] ) -%%% -------------------- -%%% Setup a connection to on of the Hosts in the argument -%%% list. Stop at the first successful connection attempt. -%%% Valid Opts are: Where: -%%% -%%% {port, Port} - Port is the port number -%%% {log, F} - F(LogLevel, FormatString, ListOfArgs) -%%% {timeout, milliSec} - request timeout -%%% -%%% -------------------------------------------------------------------- -open(Hosts) -> - open(Hosts, []). - -open(Hosts, Opts) when list(Hosts), list(Opts) -> - Self = self(), - Pid = spawn_link(fun() -> init(Hosts, Opts, Self) end), - recv(Pid). - -%%% -------------------------------------------------------------------- -%%% Shutdown connection (and process) asynchronous. -%%% -------------------------------------------------------------------- - -close(Handle) when pid(Handle) -> - send(Handle, close). - -%%% -------------------------------------------------------------------- -%%% Set who we should link ourselves to -%%% -------------------------------------------------------------------- - -controlling_process(Handle, Pid) when pid(Handle),pid(Pid) -> - link(Pid), - send(Handle, {cnt_proc, Pid}), - recv(Handle). - -%%% -------------------------------------------------------------------- -%%% Authenticate ourselves to the Directory -%%% using simple authentication. -%%% -%%% Dn - The name of the entry to bind as -%%% Passwd - The password to be used -%%% -%%% Returns: ok | {error, Error} -%%% -------------------------------------------------------------------- -simple_bind(Handle, Dn, Passwd) when pid(Handle) -> - send(Handle, {simple_bind, Dn, Passwd}), - recv(Handle). - -%%% -------------------------------------------------------------------- -%%% Add an entry. 
The entry field MUST NOT exist for the AddRequest -%%% to succeed. The parent of the entry MUST exist. -%%% Example: -%%% -%%% add(Handle, -%%% "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com", -%%% [{"objectclass", ["person"]}, -%%% {"cn", ["Bill Valentine"]}, -%%% {"sn", ["Valentine"]}, -%%% {"telephoneNumber", ["545 555 00"]}] -%%% ) -%%% -------------------------------------------------------------------- -add(Handle, Entry, Attributes) when pid(Handle),list(Entry),list(Attributes) -> - send(Handle, {add, Entry, add_attrs(Attributes)}), - recv(Handle). - -%%% Do sanity check ! -add_attrs(Attrs) -> - F = fun({Type,Vals}) when list(Type),list(Vals) -> - %% Confused ? Me too... :-/ - {'AddRequest_attributes',Type, Vals} - end, - case catch lists:map(F, Attrs) of - {'EXIT', _} -> throw({error, attribute_values}); - Else -> Else - end. - -%%% -------------------------------------------------------------------- -%%% Delete an entry. The entry consists of the DN of -%%% the entry to be deleted. -%%% Example: -%%% -%%% delete(Handle, -%%% "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com" -%%% ) -%%% -------------------------------------------------------------------- -delete(Handle, Entry) when pid(Handle), list(Entry) -> - send(Handle, {delete, Entry}), - recv(Handle). - -%%% -------------------------------------------------------------------- -%%% Modify an entry. Given an entry a number of modification -%%% operations can be performed as one atomic operation. -%%% Example: -%%% -%%% modify(Handle, -%%% "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com", -%%% [replace("telephoneNumber", ["555 555 00"]), -%%% add("description", ["LDAP hacker"])] -%%% ) -%%% -------------------------------------------------------------------- -modify(Handle, Object, Mods) when pid(Handle), list(Object), list(Mods) -> - send(Handle, {modify, Object, Mods}), - recv(Handle). - -%%% -%%% Modification operations. 
-%%% Example: -%%% replace("telephoneNumber", ["555 555 00"]) -%%% -mod_add(Type, Values) when list(Type), list(Values) -> m(add, Type, Values). -mod_delete(Type, Values) when list(Type), list(Values) -> m(delete, Type, Values). -mod_replace(Type, Values) when list(Type), list(Values) -> m(replace, Type, Values). - -m(Operation, Type, Values) -> - #'ModifyRequest_modification_SEQOF'{ - operation = Operation, - modification = #'AttributeTypeAndValues'{ - type = Type, - vals = Values}}. - -%%% -------------------------------------------------------------------- -%%% Modify an entry. Given an entry a number of modification -%%% operations can be performed as one atomic operation. -%%% Example: -%%% -%%% modify_dn(Handle, -%%% "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com", -%%% "cn=Ben Emerson", -%%% true, -%%% "" -%%% ) -%%% -------------------------------------------------------------------- -modify_dn(Handle, Entry, NewRDN, DelOldRDN, NewSup) - when pid(Handle),list(Entry),list(NewRDN),atom(DelOldRDN),list(NewSup) -> - send(Handle, {modify_dn, Entry, NewRDN, - bool_p(DelOldRDN), optional(NewSup)}), - recv(Handle). - -%%% Sanity checks ! - -bool_p(Bool) when Bool==true;Bool==false -> Bool. - -optional([]) -> asn1_NOVALUE; -optional(Value) -> Value. - -%%% -------------------------------------------------------------------- -%%% Synchronous search of the Directory returning a -%%% requested set of attributes. 
-%%% -%%% Example: -%%% -%%% Filter = eldap:substrings("sn", [{any,"o"}]), -%%% eldap:search(S, [{base, "dc=bluetail, dc=com"}, -%%% {filter, Filter}, -%%% {attributes,["cn"]}])), -%%% -%%% Returned result: {ok, #eldap_search_result{}} -%%% -%%% Example: -%%% -%%% {ok,{eldap_search_result, -%%% [{eldap_entry, -%%% "cn=Magnus Froberg, dc=bluetail, dc=com", -%%% [{"cn",["Magnus Froberg"]}]}, -%%% {eldap_entry, -%%% "cn=Torbjorn Tornkvist, dc=bluetail, dc=com", -%%% [{"cn",["Torbjorn Tornkvist"]}]}], -%%% []}} -%%% -%%% -------------------------------------------------------------------- -search(Handle, A) when pid(Handle), record(A, eldap_search) -> - call_search(Handle, A); -search(Handle, L) when pid(Handle), list(L) -> - case catch parse_search_args(L) of - {error, Emsg} -> {error, Emsg}; - A when record(A, eldap_search) -> call_search(Handle, A) - end. - -call_search(Handle, A) -> - send(Handle, {search, A}), - recv(Handle). - -parse_search_args(Args) -> - parse_search_args(Args, #eldap_search{scope = wholeSubtree}). - -parse_search_args([{base, Base}|T],A) -> - parse_search_args(T,A#eldap_search{base = Base}); -parse_search_args([{filter, Filter}|T],A) -> - parse_search_args(T,A#eldap_search{filter = Filter}); -parse_search_args([{scope, Scope}|T],A) -> - parse_search_args(T,A#eldap_search{scope = Scope}); -parse_search_args([{attributes, Attrs}|T],A) -> - parse_search_args(T,A#eldap_search{attributes = Attrs}); -parse_search_args([{types_only, TypesOnly}|T],A) -> - parse_search_args(T,A#eldap_search{types_only = TypesOnly}); -parse_search_args([{timeout, Timeout}|T],A) when integer(Timeout) -> - parse_search_args(T,A#eldap_search{timeout = Timeout}); -parse_search_args([H|_],_) -> - throw({error,{unknown_arg, H}}); -parse_search_args([],A) -> - A. - -%%% -%%% The Scope parameter -%%% -baseObject() -> baseObject. -singleLevel() -> singleLevel. -wholeSubtree() -> wholeSubtree. 
- -%%% -%%% Boolean filter operations -%%% -'and'(ListOfFilters) when list(ListOfFilters) -> {'and',ListOfFilters}. -'or'(ListOfFilters) when list(ListOfFilters) -> {'or', ListOfFilters}. -'not'(Filter) when tuple(Filter) -> {'not',Filter}. - -%%% -%%% The following Filter parameters consist of an attribute -%%% and an attribute value. Example: F("uid","tobbe") -%%% -equalityMatch(Desc, Value) -> {equalityMatch, av_assert(Desc, Value)}. -greaterOrEqual(Desc, Value) -> {greaterOrEqual, av_assert(Desc, Value)}. -lessOrEqual(Desc, Value) -> {lessOrEqual, av_assert(Desc, Value)}. -approxMatch(Desc, Value) -> {approxMatch, av_assert(Desc, Value)}. - -av_assert(Desc, Value) -> - #'AttributeValueAssertion'{attributeDesc = Desc, - assertionValue = Value}. - -%%% -%%% Filter to check for the presence of an attribute -%%% -present(Attribute) when list(Attribute) -> - {present, Attribute}. - - -%%% -%%% A substring filter seem to be based on a pattern: -%%% -%%% InitValue*AnyValue*FinalValue -%%% -%%% where all three parts seem to be optional (at least when -%%% talking with an OpenLDAP server). Thus, the arguments -%%% to substrings/2 looks like this: -%%% -%%% Type ::= string( ) -%%% SubStr ::= listof( {initial,Value} | {any,Value}, {final,Value}) -%%% -%%% Example: substrings("sn",[{initial,"To"},{any,"kv"},{final,"st"}]) -%%% will match entries containing: 'sn: Tornkvist' -%%% -substrings(Type, SubStr) when list(Type), list(SubStr) -> - Ss = {'SubstringFilter_substrings',v_substr(SubStr)}, - {substrings,#'SubstringFilter'{type = Type, - substrings = Ss}}. - -%%% -------------------------------------------------------------------- -%%% Worker process. We keep track of a controlling process to -%%% be able to terminate together with it. 
-%%% -------------------------------------------------------------------- - -init(Hosts, Opts, Cpid) -> - Data = parse_args(Opts, Cpid, #eldap{}), - case try_connect(Hosts, Data) of - {ok,Data2} -> - send(Cpid, {ok,self()}), - put(req_timeout, Data#eldap.timeout), % kludge... - loop(Cpid, Data2); - Else -> - send(Cpid, Else), - unlink(Cpid), - exit(Else) - end. - -parse_args([{port, Port}|T], Cpid, Data) when integer(Port) -> - parse_args(T, Cpid, Data#eldap{port = Port}); -parse_args([{timeout, Timeout}|T], Cpid, Data) when integer(Timeout),Timeout>0 -> - parse_args(T, Cpid, Data#eldap{timeout = Timeout}); -parse_args([{anon_auth, true}|T], Cpid, Data) -> - parse_args(T, Cpid, Data#eldap{anon_auth = false}); -parse_args([{anon_auth, _}|T], Cpid, Data) -> - parse_args(T, Cpid, Data); -parse_args([{ssl, true}|T], Cpid, Data) -> - parse_args(T, Cpid, Data#eldap{use_tls = true}); -parse_args([{ssl, _}|T], Cpid, Data) -> - parse_args(T, Cpid, Data); -parse_args([{log, F}|T], Cpid, Data) when function(F) -> - parse_args(T, Cpid, Data#eldap{log = F}); -parse_args([{log, _}|T], Cpid, Data) -> - parse_args(T, Cpid, Data); -parse_args([H|_], Cpid, _) -> - send(Cpid, {error,{wrong_option,H}}), - exit(wrong_option); -parse_args([], _, Data) -> - Data. - -%%% Try to connect to the hosts in the listed order, -%%% and stop with the first one to which a successful -%%% connection is made. - -try_connect([Host|Hosts], Data) -> - TcpOpts = [{packet, asn1}, {active,false}], - case do_connect(Host, Data, TcpOpts) of - {ok,Fd} -> {ok,Data#eldap{host = Host, fd = Fd}}; - _ -> try_connect(Hosts, Data) - end; -try_connect([],_) -> - {error,"connect failed"}. - -do_connect(Host, Data, Opts) when Data#eldap.use_tls == false -> - gen_tcp:connect(Host, Data#eldap.port, Opts, Data#eldap.timeout); -do_connect(Host, Data, Opts) when Data#eldap.use_tls == true -> - ssl:connect(Host, Data#eldap.port, [{verify,0}|Opts]). 
- - -loop(Cpid, Data) -> - receive - - {From, {search, A}} -> - {Res,NewData} = do_search(Data, A), - send(From,Res), - loop(Cpid, NewData); - - {From, {modify, Obj, Mod}} -> - {Res,NewData} = do_modify(Data, Obj, Mod), - send(From,Res), - loop(Cpid, NewData); - - {From, {modify_dn, Obj, NewRDN, DelOldRDN, NewSup}} -> - {Res,NewData} = do_modify_dn(Data, Obj, NewRDN, DelOldRDN, NewSup), - send(From,Res), - loop(Cpid, NewData); - - {From, {add, Entry, Attrs}} -> - {Res,NewData} = do_add(Data, Entry, Attrs), - send(From,Res), - loop(Cpid, NewData); - - {From, {delete, Entry}} -> - {Res,NewData} = do_delete(Data, Entry), - send(From,Res), - loop(Cpid, NewData); - - {From, {simple_bind, Dn, Passwd}} -> - {Res,NewData} = do_simple_bind(Data, Dn, Passwd), - send(From,Res), - loop(Cpid, NewData); - - {From, {cnt_proc, NewCpid}} -> - unlink(Cpid), - send(From,ok), - ?PRINT("New Cpid is: ~p~n",[NewCpid]), - loop(NewCpid, Data); - - {From, close} -> - unlink(Cpid), - exit(closed); - - {Cpid, 'EXIT', Reason} -> - ?PRINT("Got EXIT from Cpid, reason=~p~n",[Reason]), - exit(Reason); - - _XX -> - ?PRINT("loop got: ~p~n",[_XX]), - loop(Cpid, Data) - - end. - -%%% -------------------------------------------------------------------- -%%% bindRequest -%%% -------------------------------------------------------------------- - -%%% Authenticate ourselves to the directory using -%%% simple authentication. - -do_simple_bind(Data, anon, anon) -> %% For testing - do_the_simple_bind(Data, "", ""); -do_simple_bind(Data, Dn, _Passwd) when Dn=="",Data#eldap.anon_auth==false -> - {{error,anonymous_auth},Data}; -do_simple_bind(Data, _Dn, Passwd) when Passwd=="",Data#eldap.anon_auth==false -> - {{error,anonymous_auth},Data}; -do_simple_bind(Data, Dn, Passwd) -> - do_the_simple_bind(Data, Dn, Passwd). 
- -do_the_simple_bind(Data, Dn, Passwd) -> - case catch exec_simple_bind(Data#eldap{binddn = Dn, - passwd = Passwd, - id = bump_id(Data)}) of - {ok,NewData} -> {ok,NewData}; - {error,Emsg} -> {{error,Emsg},Data}; - Else -> {{error,Else},Data} - end. - -exec_simple_bind(Data) -> - Req = #'BindRequest'{version = Data#eldap.version, - name = Data#eldap.binddn, - authentication = {simple, Data#eldap.passwd}}, - log2(Data, "bind request = ~p~n", [Req]), - Reply = request(Data#eldap.fd, Data, Data#eldap.id, {bindRequest, Req}), - log2(Data, "bind reply = ~p~n", [Reply]), - exec_simple_bind_reply(Data, Reply). - -exec_simple_bind_reply(Data, {ok,Msg}) when - Msg#'LDAPMessage'.messageID == Data#eldap.id -> - case Msg#'LDAPMessage'.protocolOp of - {bindResponse, Result} -> - case Result#'BindResponse'.resultCode of - success -> {ok,Data}; - Error -> {error, Error} - end; - Other -> {error, Other} - end; -exec_simple_bind_reply(_, Error) -> - {error, Error}. - - -%%% -------------------------------------------------------------------- -%%% searchRequest -%%% -------------------------------------------------------------------- - -do_search(Data, A) -> - case catch do_search_0(Data, A) of - {error,Emsg} -> {ldap_closed_p(Data, Emsg),Data}; - {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data}; - {ok,Res,Ref,NewData} -> {{ok,polish(Res, Ref)},NewData}; - Else -> {ldap_closed_p(Data, Else),Data} - end. - -%%% -%%% Polish the returned search result -%%% - -polish(Res, Ref) -> - R = polish_result(Res), - %%% No special treatment of referrals at the moment. - #eldap_search_result{entries = R, - referrals = Ref}. - -polish_result([H|T]) when record(H, 'SearchResultEntry') -> - ObjectName = H#'SearchResultEntry'.objectName, - F = fun({_,A,V}) -> {A,V} end, - Attrs = lists:map(F, H#'SearchResultEntry'.attributes), - [#eldap_entry{object_name = ObjectName, - attributes = Attrs}| - polish_result(T)]; -polish_result([]) -> - []. 
- -do_search_0(Data, A) -> - Req = #'SearchRequest'{baseObject = A#eldap_search.base, - scope = v_scope(A#eldap_search.scope), - derefAliases = neverDerefAliases, - sizeLimit = 0, % no size limit - timeLimit = v_timeout(A#eldap_search.timeout), - typesOnly = v_bool(A#eldap_search.types_only), - filter = v_filter(A#eldap_search.filter), - attributes = v_attributes(A#eldap_search.attributes) - }, - Id = bump_id(Data), - collect_search_responses(Data#eldap{id=Id}, Req, Id). - -%%% The returned answers cames in one packet per entry -%%% mixed with possible referals - -collect_search_responses(Data, Req, ID) -> - S = Data#eldap.fd, - log2(Data, "search request = ~p~n", [Req]), - send_request(S, Data, ID, {searchRequest, Req}), - Resp = recv_response(S, Data), - log2(Data, "search reply = ~p~n", [Resp]), - collect_search_responses(Data, S, ID, Resp, [], []). - -collect_search_responses(Data, S, ID, {ok,Msg}, Acc, Ref) - when record(Msg,'LDAPMessage') -> - case Msg#'LDAPMessage'.protocolOp of - {'searchResDone',R} when R#'LDAPResult'.resultCode == success -> - log2(Data, "search reply = searchResDone ~n", []), - {ok,Acc,Ref,Data}; - {'searchResEntry',R} when record(R,'SearchResultEntry') -> - Resp = recv_response(S, Data), - log2(Data, "search reply = ~p~n", [Resp]), - collect_search_responses(Data, S, ID, Resp, [R|Acc], Ref); - {'searchResRef',R} -> - %% At the moment we don't do anyting sensible here since - %% I haven't been able to trigger the server to generate - %% a response like this. - Resp = recv_response(S, Data), - log2(Data, "search reply = ~p~n", [Resp]), - collect_search_responses(Data, S, ID, Resp, Acc, [R|Ref]); - Else -> - throw({error,Else}) - end; -collect_search_responses(_, _, _, Else, _, _) -> - throw({error,Else}). 
- -%%% -------------------------------------------------------------------- -%%% addRequest -%%% -------------------------------------------------------------------- - -do_add(Data, Entry, Attrs) -> - case catch do_add_0(Data, Entry, Attrs) of - {error,Emsg} -> {ldap_closed_p(Data, Emsg),Data}; - {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data}; - {ok,NewData} -> {ok,NewData}; - Else -> {ldap_closed_p(Data, Else),Data} - end. - -do_add_0(Data, Entry, Attrs) -> - Req = #'AddRequest'{entry = Entry, - attributes = Attrs}, - S = Data#eldap.fd, - Id = bump_id(Data), - log2(Data, "add request = ~p~n", [Req]), - Resp = request(S, Data, Id, {addRequest, Req}), - log2(Data, "add reply = ~p~n", [Resp]), - check_reply(Data#eldap{id = Id}, Resp, addResponse). - - -%%% -------------------------------------------------------------------- -%%% deleteRequest -%%% -------------------------------------------------------------------- - -do_delete(Data, Entry) -> - case catch do_delete_0(Data, Entry) of - {error,Emsg} -> {ldap_closed_p(Data, Emsg),Data}; - {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data}; - {ok,NewData} -> {ok,NewData}; - Else -> {ldap_closed_p(Data, Else),Data} - end. - -do_delete_0(Data, Entry) -> - S = Data#eldap.fd, - Id = bump_id(Data), - log2(Data, "del request = ~p~n", [Entry]), - Resp = request(S, Data, Id, {delRequest, Entry}), - log2(Data, "del reply = ~p~n", [Resp]), - check_reply(Data#eldap{id = Id}, Resp, delResponse). - - -%%% -------------------------------------------------------------------- -%%% modifyRequest -%%% -------------------------------------------------------------------- - -do_modify(Data, Obj, Mod) -> - case catch do_modify_0(Data, Obj, Mod) of - {error,Emsg} -> {ldap_closed_p(Data, Emsg),Data}; - {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data}; - {ok,NewData} -> {ok,NewData}; - Else -> {ldap_closed_p(Data, Else),Data} - end. 
- -do_modify_0(Data, Obj, Mod) -> - v_modifications(Mod), - Req = #'ModifyRequest'{object = Obj, - modification = Mod}, - S = Data#eldap.fd, - Id = bump_id(Data), - log2(Data, "modify request = ~p~n", [Req]), - Resp = request(S, Data, Id, {modifyRequest, Req}), - log2(Data, "modify reply = ~p~n", [Resp]), - check_reply(Data#eldap{id = Id}, Resp, modifyResponse). - -%%% -------------------------------------------------------------------- -%%% modifyDNRequest -%%% -------------------------------------------------------------------- - -do_modify_dn(Data, Entry, NewRDN, DelOldRDN, NewSup) -> - case catch do_modify_dn_0(Data, Entry, NewRDN, DelOldRDN, NewSup) of - {error,Emsg} -> {ldap_closed_p(Data, Emsg),Data}; - {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data}; - {ok,NewData} -> {ok,NewData}; - Else -> {ldap_closed_p(Data, Else),Data} - end. - -do_modify_dn_0(Data, Entry, NewRDN, DelOldRDN, NewSup) -> - Req = #'ModifyDNRequest'{entry = Entry, - newrdn = NewRDN, - deleteoldrdn = DelOldRDN, - newSuperior = NewSup}, - S = Data#eldap.fd, - Id = bump_id(Data), - log2(Data, "modify DN request = ~p~n", [Req]), - Resp = request(S, Data, Id, {modDNRequest, Req}), - log2(Data, "modify DN reply = ~p~n", [Resp]), - check_reply(Data#eldap{id = Id}, Resp, modDNResponse). - -%%% -------------------------------------------------------------------- -%%% Send an LDAP request and receive the answer -%%% -------------------------------------------------------------------- - -request(S, Data, ID, Request) -> - send_request(S, Data, ID, Request), - recv_response(S, Data). - -send_request(S, Data, ID, Request) -> - Message = #'LDAPMessage'{messageID = ID, - protocolOp = Request}, - {ok,Bytes} = asn1rt:encode('ELDAPv3', 'LDAPMessage', Message), - case do_send(S, Data, Bytes) of - {error,Reason} -> throw({gen_tcp_error,Reason}); - Else -> Else - end. 
- -do_send(S, Data, Bytes) when Data#eldap.use_tls == false -> - gen_tcp:send(S, Bytes); -do_send(S, Data, Bytes) when Data#eldap.use_tls == true -> - ssl:send(S, Bytes). - -do_recv(S, Data, Len, Timeout) when Data#eldap.use_tls == false -> - gen_tcp:recv(S, Len, Timeout); -do_recv(S, Data, Len, Timeout) when Data#eldap.use_tls == true -> - ssl:recv(S, Len, Timeout). - -recv_response(S, Data) -> - Timeout = get(req_timeout), % kludge... - case do_recv(S, Data, 0, Timeout) of - {ok, Packet} -> - check_tag(Packet), - case asn1rt:decode('ELDAPv3', 'LDAPMessage', Packet) of - {ok,Resp} -> {ok,Resp}; - Error -> throw(Error) - end; - {error,Reason} -> - throw({gen_tcp_error, Reason}); - Error -> - throw(Error) - end. - -%%% Sanity check of received packet -check_tag(Data) -> - case asn1rt_ber_bin:decode_tag(b2l(Data)) of - {_Tag, Data1, _Rb} -> - case asn1rt_ber_bin:decode_length(b2l(Data1)) of - {{_Len, _Data2}, _Rb2} -> ok; - _ -> throw({error,decoded_tag_length}) - end; - _ -> throw({error,decoded_tag}) - end. - -%%% Check for expected kind of reply -check_reply(Data, {ok,Msg}, Op) when - Msg#'LDAPMessage'.messageID == Data#eldap.id -> - case Msg#'LDAPMessage'.protocolOp of - {Op, Result} -> - case Result#'LDAPResult'.resultCode of - success -> {ok,Data}; - Error -> {error, Error} - end; - Other -> {error, Other} - end; -check_reply(_, Error, _) -> - {error, Error}. 
- - -%%% -------------------------------------------------------------------- -%%% Verify the input data -%%% -------------------------------------------------------------------- - -v_filter({'and',L}) -> {'and',L}; -v_filter({'or', L}) -> {'or',L}; -v_filter({'not',L}) -> {'not',L}; -v_filter({equalityMatch,AV}) -> {equalityMatch,AV}; -v_filter({greaterOrEqual,AV}) -> {greaterOrEqual,AV}; -v_filter({lessOrEqual,AV}) -> {lessOrEqual,AV}; -v_filter({approxMatch,AV}) -> {approxMatch,AV}; -v_filter({present,A}) -> {present,A}; -v_filter({substrings,S}) when record(S,'SubstringFilter') -> {substrings,S}; -v_filter(_Filter) -> throw({error,concat(["unknown filter: ",_Filter])}). - -v_modifications(Mods) -> - F = fun({_,Op,_}) -> - case lists:member(Op,[add,delete,replace]) of - true -> true; - _ -> throw({error,{mod_operation,Op}}) - end - end, - lists:foreach(F, Mods). - -v_substr([{Key,Str}|T]) when list(Str),Key==initial;Key==any;Key==final -> - [{Key,Str}|v_substr(T)]; -v_substr([H|_]) -> - throw({error,{substring_arg,H}}); -v_substr([]) -> - []. -v_scope(baseObject) -> baseObject; -v_scope(singleLevel) -> singleLevel; -v_scope(wholeSubtree) -> wholeSubtree; -v_scope(_Scope) -> throw({error,concat(["unknown scope: ",_Scope])}). - -v_bool(true) -> true; -v_bool(false) -> false; -v_bool(_Bool) -> throw({error,concat(["not Boolean: ",_Bool])}). - -v_timeout(I) when integer(I), I>=0 -> I; -v_timeout(_I) -> throw({error,concat(["timeout not positive integer: ",_I])}). - -v_attributes(Attrs) -> - F = fun(A) when list(A) -> A; - (A) -> throw({error,concat(["attribute not String: ",A])}) - end, - lists:map(F,Attrs). - - -%%% -------------------------------------------------------------------- -%%% Log routines. Call a user provided log routine F. -%%% -------------------------------------------------------------------- - -log1(Data, Str, Args) -> log(Data, Str, Args, 1). -log2(Data, Str, Args) -> log(Data, Str, Args, 2). 
- -log(Data, Str, Args, Level) when function(Data#eldap.log) -> - catch (Data#eldap.log)(Level, Str, Args); -log(_, _, _, _) -> - ok. - - -%%% -------------------------------------------------------------------- -%%% Misc. routines -%%% -------------------------------------------------------------------- - -send(To,Msg) -> To ! {self(),Msg}. -recv(From) -> receive {From,Msg} -> Msg end. - -ldap_closed_p(Data, Emsg) when Data#eldap.use_tls == true -> - %% Check if the SSL socket seems to be alive or not - case catch ssl:sockname(Data#eldap.fd) of - {error, _} -> - ssl:close(Data#eldap.fd), - {error, ldap_closed}; - {ok, _} -> - {error, Emsg}; - _ -> - %% sockname crashes if the socket pid is not alive - {error, ldap_closed} - end; -ldap_closed_p(Data, Emsg) -> - %% non-SSL socket - case inet:port(Data#eldap.fd) of - {error,_} -> {error, ldap_closed}; - _ -> {error,Emsg} - end. - -bump_id(Data) -> Data#eldap.id + 1. - - -%%% -------------------------------------------------------------------- -%%% parse_dn/1 - Implementation of RFC 2253: -%%% -%%% "UTF-8 String Representation of Distinguished Names" -%%% -%%% Test cases: -%%% -%%% The simplest case: -%%% -%%% 1> eldap:parse_dn("CN=Steve Kille,O=Isode Limited,C=GB"). -%%% {ok,[[{attribute_type_and_value,"CN","Steve Kille"}], -%%% [{attribute_type_and_value,"O","Isode Limited"}], -%%% [{attribute_type_and_value,"C","GB"}]]} -%%% -%%% The first RDN is multi-valued: -%%% -%%% 2> eldap:parse_dn("OU=Sales+CN=J. Smith,O=Widget Inc.,C=US"). -%%% {ok,[[{attribute_type_and_value,"OU","Sales"}, -%%% {attribute_type_and_value,"CN","J. Smith"}], -%%% [{attribute_type_and_value,"O","Widget Inc."}], -%%% [{attribute_type_and_value,"C","US"}]]} -%%% -%%% Quoting a comma: -%%% -%%% 3> eldap:parse_dn("CN=L. Eagle,O=Sue\\, Grabbit and Runn,C=GB"). -%%% {ok,[[{attribute_type_and_value,"CN","L. 
Eagle"}], -%%% [{attribute_type_and_value,"O","Sue\\, Grabbit and Runn"}], -%%% [{attribute_type_and_value,"C","GB"}]]} -%%% -%%% A value contains a carriage return: -%%% -%%% 4> eldap:parse_dn("CN=Before -%%% 4> After,O=Test,C=GB"). -%%% {ok,[[{attribute_type_and_value,"CN","Before\nAfter"}], -%%% [{attribute_type_and_value,"O","Test"}], -%%% [{attribute_type_and_value,"C","GB"}]]} -%%% -%%% 5> eldap:parse_dn("CN=Before\\0DAfter,O=Test,C=GB"). -%%% {ok,[[{attribute_type_and_value,"CN","Before\\0DAfter"}], -%%% [{attribute_type_and_value,"O","Test"}], -%%% [{attribute_type_and_value,"C","GB"}]]} -%%% -%%% An RDN in OID form: -%%% -%%% 6> eldap:parse_dn("1.3.6.1.4.1.1466.0=#04024869,O=Test,C=GB"). -%%% {ok,[[{attribute_type_and_value,"1.3.6.1.4.1.1466.0","#04024869"}], -%%% [{attribute_type_and_value,"O","Test"}], -%%% [{attribute_type_and_value,"C","GB"}]]} -%%% -%%% -%%% -------------------------------------------------------------------- - -parse_dn("") -> % empty DN string - {ok,[]}; -parse_dn([H|_] = Str) when H=/=$, -> % 1:st name-component ! - case catch parse_name(Str,[]) of - {'EXIT',Reason} -> {parse_error,internal_error,Reason}; - Else -> Else - end. - -parse_name("",Acc) -> - {ok,lists:reverse(Acc)}; -parse_name([$,|T],Acc) -> % N:th name-component ! - parse_name(T,Acc); -parse_name(Str,Acc) -> - {Rest,NameComponent} = parse_name_component(Str), - parse_name(Rest,[NameComponent|Acc]). - -parse_name_component(Str) -> - parse_name_component(Str,[]). - -parse_name_component(Str,Acc) -> - case parse_attribute_type_and_value(Str) of - {[$+|Rest], ATV} -> - parse_name_component(Rest,[ATV|Acc]); - {Rest,ATV} -> - {Rest,lists:reverse([ATV|Acc])} - end. - -parse_attribute_type_and_value(Str) -> - case parse_attribute_type(Str) of - {Rest,[]} -> - error(expecting_attribute_type,Str); - {Rest,Type} -> - Rest2 = parse_equal_sign(Rest), - {Rest3,Value} = parse_attribute_value(Rest2), - {Rest3,{attribute_type_and_value,Type,Value}} - end. 
- --define(IS_ALPHA(X) , X>=$a,X=<$z;X>=$A,X=<$Z ). --define(IS_DIGIT(X) , X>=$0,X=<$9 ). --define(IS_SPECIAL(X) , X==$,;X==$=;X==$+;X==$<;X==$>;X==$#;X==$; ). --define(IS_QUOTECHAR(X) , X=/=$\\,X=/=$" ). --define(IS_STRINGCHAR(X) , - X=/=$,,X=/=$=,X=/=$+,X=/=$<,X=/=$>,X=/=$#,X=/=$;,?IS_QUOTECHAR(X) ). --define(IS_HEXCHAR(X) , ?IS_DIGIT(X);X>=$a,X=<$f;X>=$A,X=<$F ). - -parse_attribute_type([H|T]) when ?IS_ALPHA(H) -> - %% NB: It must be an error in the RFC in the definition - %% of 'attributeType', should be: (ALPHA *keychar) - {Rest,KeyChars} = parse_keychars(T), - {Rest,[H|KeyChars]}; -parse_attribute_type([H|_] = Str) when ?IS_DIGIT(H) -> - parse_oid(Str); -parse_attribute_type(Str) -> - error(invalid_attribute_type,Str). - - - -%%% Is a hexstring ! -parse_attribute_value([$#,X,Y|T]) when ?IS_HEXCHAR(X),?IS_HEXCHAR(Y) -> - {Rest,HexString} = parse_hexstring(T), - {Rest,[$#,X,Y|HexString]}; -%%% Is a "quotation-sequence" ! -parse_attribute_value([$"|T]) -> - {Rest,Quotation} = parse_quotation(T), - {Rest,[$"|Quotation]}; -%%% Is a stringchar , pair or Empty ! -parse_attribute_value(Str) -> - parse_string(Str). - -parse_hexstring(Str) -> - parse_hexstring(Str,[]). - -parse_hexstring([X,Y|T],Acc) when ?IS_HEXCHAR(X),?IS_HEXCHAR(Y) -> - parse_hexstring(T,[Y,X|Acc]); -parse_hexstring(T,Acc) -> - {T,lists:reverse(Acc)}. - -parse_quotation([$"|T]) -> % an empty: "" is ok ! - {T,[$"]}; -parse_quotation(Str) -> - parse_quotation(Str,[]). 
- -%%% Parse to end of quotation -parse_quotation([$"|T],Acc) -> - {T,lists:reverse([$"|Acc])}; -parse_quotation([X|T],Acc) when ?IS_QUOTECHAR(X) -> - parse_quotation(T,[X|Acc]); -parse_quotation([$\\,X|T],Acc) when ?IS_SPECIAL(X) -> - parse_quotation(T,[X,$\\|Acc]); -parse_quotation([$\\,$\\|T],Acc) -> - parse_quotation(T,[$\\,$\\|Acc]); -parse_quotation([$\\,$"|T],Acc) -> - parse_quotation(T,[$",$\\|Acc]); -parse_quotation([$\\,X,Y|T],Acc) when ?IS_HEXCHAR(X),?IS_HEXCHAR(Y) -> - parse_quotation(T,[Y,X,$\\|Acc]); -parse_quotation(T,_) -> - error(expecting_double_quote_mark,T). - -parse_string(Str) -> - parse_string(Str,[]). - -parse_string("",Acc) -> - {"",lists:reverse(Acc)}; -parse_string([H|T],Acc) when ?IS_STRINGCHAR(H) -> - parse_string(T,[H|Acc]); -parse_string([$\\,X|T],Acc) when ?IS_SPECIAL(X) -> % is a pair ! - parse_string(T,[X,$\\|Acc]); -parse_string([$\\,$\\|T],Acc) -> % is a pair ! - parse_string(T,[$\\,$\\|Acc]); -parse_string([$\\,$" |T],Acc) -> % is a pair ! - parse_string(T,[$" ,$\\|Acc]); -parse_string([$\\,X,Y|T],Acc) when ?IS_HEXCHAR(X),?IS_HEXCHAR(Y) -> % is a pair! - parse_string(T,[Y,X,$\\|Acc]); -parse_string(T,Acc) -> - {T,lists:reverse(Acc)}. - -parse_equal_sign([$=|T]) -> T; -parse_equal_sign(T) -> error(expecting_equal_sign,T). - -parse_keychars(Str) -> parse_keychars(Str,[]). - -parse_keychars([H|T],Acc) when ?IS_ALPHA(H) -> parse_keychars(T,[H|Acc]); -parse_keychars([H|T],Acc) when ?IS_DIGIT(H) -> parse_keychars(T,[H|Acc]); -parse_keychars([$-|T],Acc) -> parse_keychars(T,[$-|Acc]); -parse_keychars(T,Acc) -> {T,lists:reverse(Acc)}. - -parse_oid(Str) -> parse_oid(Str,[]). - -parse_oid([H,$.|T], Acc) when ?IS_DIGIT(H) -> - parse_oid(T,[$.,H|Acc]); -parse_oid([H|T], Acc) when ?IS_DIGIT(H) -> - parse_oid(T,[H|Acc]); -parse_oid(T, Acc) -> - {T,lists:reverse(Acc)}. - -error(Emsg,Rest) -> - throw({parse_error,Emsg,Rest}). 
- - -%%% -------------------------------------------------------------------- -%%% Parse LDAP url according to RFC 2255 -%%% -%%% Test case: -%%% -%%% 2> eldap:parse_ldap_url("ldap://10.42.126.33:389/cn=Administrative%20CA,o=Post%20Danmark,c=DK?certificateRevokationList;binary"). -%%% {ok,{{10,42,126,33},389}, -%%% [[{attribute_type_and_value,"cn","Administrative%20CA"}], -%%% [{attribute_type_and_value,"o","Post%20Danmark"}], -%%% [{attribute_type_and_value,"c","DK"}]], -%%% {attributes,["certificateRevokationList;binary"]}} -%%% -%%% -------------------------------------------------------------------- - -parse_ldap_url("ldap://" ++ Rest1 = Str) -> - {Rest2,HostPort} = parse_hostport(Rest1), - %% Split the string into DN and Attributes+etc - {Sdn,Rest3} = split_string(rm_leading_slash(Rest2),$?), - case parse_dn(Sdn) of - {parse_error,internal_error,_Reason} -> - {parse_error,internal_error,{Str,[]}}; - {parse_error,Emsg,Tail} -> - Head = get_head(Str,Tail), - {parse_error,Emsg,{Head,Tail}}; - {ok,DN} -> - %% We stop parsing here for now and leave - %% 'scope', 'filter' and 'extensions' to - %% be implemented later if needed. - {_Rest4,Attributes} = parse_attributes(Rest3), - {ok,HostPort,DN,Attributes} - end. - -rm_leading_slash([$/|Tail]) -> Tail; -rm_leading_slash(Tail) -> Tail. - -parse_attributes([$?|Tail]) -> - case split_string(Tail,$?) of - {[],Attributes} -> - {[],{attributes,string:tokens(Attributes,",")}}; - {Attributes,Rest} -> - {Rest,{attributes,string:tokens(Attributes,",")}} - end. - -parse_hostport(Str) -> - {HostPort,Rest} = split_string(Str,$/), - case split_string(HostPort,$:) of - {Shost,[]} -> - {Rest,{parse_host(Rest,Shost),?LDAP_PORT}}; - {Shost,[$:|Sport]} -> - {Rest,{parse_host(Rest,Shost), - parse_port(Rest,Sport)}} - end. - -parse_port(Rest,Sport) -> - case list_to_integer(Sport) of - Port when integer(Port) -> Port; - _ -> error(parsing_port,Rest) - end. 
- -parse_host(Rest,Shost) -> - case catch validate_host(Shost) of - {parse_error,Emsg,_} -> error(Emsg,Rest); - Host -> Host - end. - -validate_host(Shost) -> - case inet_parse:address(Shost) of - {ok,Host} -> Host; - _ -> - case inet_parse:domain(Shost) of - true -> Shost; - _ -> error(parsing_host,Shost) - end - end. - - -split_string(Str,Key) -> - Pred = fun(X) when X==Key -> false; (_) -> true end, - lists:splitwith(Pred, Str). - -get_head(Str,Tail) -> - get_head(Str,Tail,[]). - -%%% Should always succeed ! -get_head([H|Tail],Tail,Rhead) -> lists:reverse([H|Rhead]); -get_head([H|Rest],Tail,Rhead) -> get_head(Rest,Tail,[H|Rhead]). - -b2l(B) when binary(B) -> B; -b2l(L) when list(L) -> list_to_binary(L). - diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/README.test b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/README.test deleted file mode 100644 index 9816216..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/README.test +++ /dev/null @@ -1,96 +0,0 @@ -%%% $Id$ - -%%% -------------------------------------------------------------------- -%%% Init setup -%%% -------------------------------------------------------------------- - -I set up the OpenLDAP (2.0.6) server using the following -/usr/local/etc/openldap/slapd.conf file: - - include /usr/local/etc/openldap/schema/core.schema - pidfile /var/run/slapd.pid - argsfile /var/run/slapd.args - database ldbm - suffix "dc=bluetail, dc=com" - rootdn "dc=bluetail, dc=com" - rootpw hejsan - directory /usr/local/var/openldap-ldbm - index objectClass eq - - -%%% I started it on the console with some debug output: - - /usr/local/libexec/slapd -d 255 -f /usr/local/etc/openldap/slapd.conf - -%%% Then I defined the following data in: bluetail.ldif - - dn: dc=bluetail, dc=com - objectclass: organization - objectclass: dcObject - dc: bluetail - o: Bluetail AB - -%%% and in: tobbe.ldif - - dn: cn=Torbjorn Tornkvist, dc=bluetail, dc=com - objectclass: person - cn: Torbjorn 
Tornkvist - sn: Tornkvist - -%%% I load the data with: - - ldapadd -D "dc=bluetail, dc=com" -w hejsan < bluetail.ldif - ldapadd -D "dc=bluetail, dc=com" -w hejsan < people.ldif - -%%%% To search from a Unix shell: - - ldapsearch -L -b "dc=bluetail, dc=com" -w hejsan "(objectclass=*)" - ldapsearch -L -b "dc=bluetail, dc=com" -w hejsan "cn=Torbjorn Tornkvist" - ldapsearch -L -b "dc=bluetail, dc=com" -w hejsan "cn=Torb*kvist" - -%%% -------------------------------------------------------------------- -%%% Example with certificateRevocationList -%%% -------------------------------------------------------------------- - -%%% Using two ldif files: - -%%% post_danmark.ldif - -dn: o=Post Danmark, c=DK -objectclass: country -objectclass: organization -c: DK -o: Post Danmark - -%%% crl.ldif - -dn: cn=Administrative CA, o=Post Danmark, c=DK -objectclass: cRLDistributionPoint -cn: Administrative CA -certificateRevocationList;binary:< file:/home/tobbe/erlang/eldap/server1.crl - -%%% Note the definition of the CRL file !! 
- -%%% To add the difinitions - -ldapadd -D "o=Post Danmark, c=DK" -w hejsan < post_danmark.ldif -ldapadd -D "o=Post Danmark, c=DK" -w hejsan < crl.ldif - -%%% And to retreive the CRL - -ldapsearch -L -b "o=Post Danmark, c=DK" -w hejsan "(objectclass=*)" -ldapsearch -L -b "o=Post Danmark, c=DK" -w hejsan "(cn=Administrative CA)" \ - certificateRevocationList - -### Put the retrieved binary in a file (tmp) with -### the following header and footer - ------BEGIN X509 CRL----- - <...binary....> ------END X509 CRL----- - -### To verify it with openssl - - openssl crl -inform PEM -in tmp -text - -ldapsearch -L -D "cn=Torbjorn Tornkvist,o=Post Danmark,c=DK" -b "o=Post Danmark, c=DK" -w qwe123 "(cn=Torbjorn Tornkvist)" cn diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bill.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bill.ldif deleted file mode 100644 index 59022ad..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bill.ldif +++ /dev/null @@ -1,13 +0,0 @@ -dn: mail=bill@bluetail.com, dc=bluetail, dc=com -objectclass: posixAccount -mail: bill@bluetail.com -cn: Bill Valentine -sn: Valentine -uid: bill -uidNumber: 400 -gidNumber: 400 -homeDirectory: /home/accounts/bill -mailDirectory: /home/accounts/bill/INBOX -userPassword: baltazar -birMailAccept: accept -birCluster: bc1 diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bluetail.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bluetail.ldif deleted file mode 100644 index 914532e..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/bluetail.ldif +++ /dev/null @@ -1,18 +0,0 @@ -dn: dc=bluetail, dc=com -objectclass: dcObject -dc: bluetail - -dn: o=Bluetail AB, dc=bluetail, dc=com -objectclass: organization -o: Bluetail AB -street: St.Eriksgatan 44 -postalCode: 112 34 - -dn: ou=people, o=Bluetail AB, dc=bluetail, dc=com -objectclass: organizationalUnit -ou: people -description: People working at Bluetail - - - - 
diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/crl.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/crl.ldif deleted file mode 100644 index 2e52873..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/crl.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=Administrative CA,o=Post Danmark,c=DK -objectclass: cRLDistributionPoint -cn: Administrative CA -certificateRevocationList;binary:< file:/home/tobbe/erlang/eldap/server1.crl - diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/eldap_test.erl b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/eldap_test.erl deleted file mode 100644 index db64615..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/eldap_test.erl +++ /dev/null @@ -1,537 +0,0 @@ --module(eldap_test). -%%% -------------------------------------------------------------------- -%%% Created: 12 Oct 2000 by Tobbe -%%% Function: Test code for the eldap module -%%% -%%% Copyright (C) 2000 Torbjörn Törnkvist -%%% Copyright (c) 2010 Torbjorn Tornkvist -%%% See MIT-LICENSE at the top dir for licensing information. -%%% -%%% -------------------------------------------------------------------- --vc('$Id$ '). --export([topen_bind/1,topen_bind/2,all/0,t10/0,t20/0,t21/0,t22/0, - t23/0,t24/0,t25/0,t26/0,t27/0,debug/1,t30/0,t31/0, - t40/0,t41/0,t50/0,t51/0]). --export([crl1/0]). --export([switch/1]). --export([junk/0]). - --include("ELDAPv3.hrl"). --include("eldap.hrl"). - -junk() -> - DN = "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com", - Msg = #'LDAPMessage'{messageID = 1, - protocolOp = {delRequest,DN}}, - asn1rt:encode('ELDAPv3', 'LDAPMessage', Msg). - -%%% -------------------------------------------------------------------- -%%% TEST STUFF -%%% ---------- -%%% When adding a new test case it can be useful to -%%% switch on debugging, i.e debug(t) in the call to -%%% topen_bind/2. 
-%%% -------------------------------------------------------------------- - -all() -> - Check = "=== Check the result of the previous test case !~n", - t10(), - t20(),t21(),t22(),t23(),t24(),t25(),t26(),t27(), - t30(),t26(Check),t31(),t26(Check), - t40(),t26(Check),t41(),t26(Check), - t50(),t26(Check),t51(),t26(Check), - ok. - -%%% -%%% Setup a connection and bind using simple authentication -%%% -t10() -> - F = fun() -> - sleep(), - line(), - io:format("=== TEST 10 (connection setup + simple auth)~n"), - line(), - X = topen_bind("localhost", debug(f)), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Do an equality match: sn = Tornkvist -%%% -t20() -> - F = fun() -> - sleep(), - line(), - io:format("=== TEST 20 (equality match)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Filter = eldap:equalityMatch("sn","Tornkvist"), - X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"}, - {filter, Filter}])), - - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Do a substring match: sn = To*kv*st -%%% -t21() -> - F = fun() -> - sleep(), - line(), - io:format("=== TEST 21 (substring match)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Filter = eldap:substrings("sn", [{initial,"To"}, - {any,"kv"}, - {final,"st"}]), - X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"}, - {filter, Filter}])), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Do a substring match: sn = *o* -%%% and do only retrieve the cn attribute -%%% -t22() -> - F = fun() -> - sleep(), - line(), - io:format("=== TEST 22 (substring match + return 'cn' only)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Filter = eldap:substrings("sn", [{any,"o"}]), - X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"}, - {filter, Filter}, - {attributes,["cn"]}])), - io:format("~p~n",[X]), - X - end, - go(F). - - -%%% -%%% Do a present search for the attribute 'objectclass' -%%% on the base level. 
-%%% -t23() -> - F = fun() -> - sleep(), - line(), - io:format("=== TEST 23 (objectclass=* , base level)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"}, - {filter, eldap:present("objectclass")}, - {scope,eldap:baseObject()}])), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Do a present search for the attribute 'objectclass' -%%% on a single level. -%%% -t24() -> - F = fun() -> - sleep(), - line(), - io:format("=== TEST 24 (objectclass=* , single level)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"}, - {filter, eldap:present("objectclass")}, - {scope,eldap:singleLevel()}])), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Do a present search for the attribute 'objectclass' -%%% on the whole subtree. -%%% -t25() -> - F = fun() -> - sleep(), - line(), - io:format("=== TEST 25 (objectclass=* , whole subtree)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"}, - {filter, eldap:present("objectclass")}, - {scope,eldap:wholeSubtree()}])), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Do a present search for the attributes -%%% 'objectclass' and 'sn' on the whole subtree. -%%% -t26() -> t26([]). -t26(Heading) -> - F = fun() -> - sleep(), - line(), - heading(Heading, - "=== TEST 26 (objectclass=* and sn=*)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Filter = eldap:'and'([eldap:present("objectclass"), - eldap:present("sn")]), - X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"}, - {filter, Filter}, - {scope,eldap:wholeSubtree()}])), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Do a present search for the attributes -%%% 'objectclass' and (not 'sn') on the whole subtree. 
-%%% -t27() -> - F = fun() -> - sleep(), - line(), - io:format("=== TEST 27 (objectclass=* and (not sn))~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Filter = eldap:'and'([eldap:present("objectclass"), - eldap:'not'(eldap:present("sn"))]), - X=(catch eldap:search(S, [{base, "dc=bluetail, dc=com"}, - {filter, Filter}, - {scope,eldap:wholeSubtree()}])), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Replace the 'telephoneNumber' attribute and -%%% add a new attribute 'description' -%%% -t30() -> t30([]). -t30(Heading) -> - F = fun() -> - sleep(), - {_,_,Tno} = erlang:now(), - Stno = integer_to_list(Tno), - Desc = "LDAP hacker " ++ Stno, - line(), - heading(Heading, - "=== TEST 30 (replace telephoneNumber/" - ++ Stno ++ " add description/" ++ Desc - ++ ")~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Obj = "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com", - Mod = [eldap:mod_replace("telephoneNumber", [Stno]), - eldap:mod_add("description", [Desc])], - X=(catch eldap:modify(S, Obj, Mod)), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Delete attribute 'description' -%%% -t31() -> t31([]). -t31(Heading) -> - F = fun() -> - sleep(), - {_,_,Tno} = erlang:now(), - line(), - heading(Heading, - "=== TEST 31 (delete 'description' attribute)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Obj = "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com", - Mod = [eldap:mod_delete("description", [])], - X=(catch eldap:modify(S, Obj, Mod)), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Add an entry -%%% -t40() -> t40([]). 
-t40(Heading) -> - F = fun() -> - sleep(), - {_,_,Tno} = erlang:now(), - line(), - heading(Heading, - "=== TEST 40 (add entry 'Bill Valentine')~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Entry = "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com", - X=(catch eldap:add(S, Entry, - [{"objectclass", ["person"]}, - {"cn", ["Bill Valentine"]}, - {"sn", ["Valentine"]}, - {"telephoneNumber", ["545 555 00"]}])), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Delete an entry -%%% -t41() -> t41([]). -t41(Heading) -> - F = fun() -> - sleep(), - {_,_,Tno} = erlang:now(), - line(), - heading(Heading, - "=== TEST 41 (delete entry 'Bill Valentine')~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Entry = "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com", - X=(catch eldap:delete(S, Entry)), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Modify the DN of an entry -%%% -t50() -> t50([]). -t50(Heading) -> - F = fun() -> - sleep(), - {_,_,Tno} = erlang:now(), - line(), - heading(Heading, - "=== TEST 50 (modify DN to: 'Torbjorn M.Tornkvist')~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Entry = "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com", - X=(catch eldap:modify_dn(S, Entry, - "cn=Torbjorn M.Tornkvist", - false, - [])), - io:format("~p~n",[X]), - X - end, - go(F). - -%%% -%%% Modify the DN of an entry and remove the RDN attribute. -%%% NB: Must be run after: 't50' ! -%%% -t51() -> t51([]). -t51(Heading) -> - F = fun() -> - sleep(), - {_,_,Tno} = erlang:now(), - line(), - heading(Heading, - "=== TEST 51 (modify DN, remove the RDN attribute)~n"), - line(), - {ok,S} = topen_bind("localhost", debug(f)), - Entry = "cn=Torbjorn M.Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com", - X=(catch eldap:modify_dn(S, Entry, - "cn=Torbjorn Tornkvist", - true, - [])), - io:format("~p~n",[X]), - X - end, - go(F). 
- -%%% -------------------------------------------------------------------- -%%% Test cases for certificate revocation lists -%%% -------------------------------------------------------------------- - -crl1() -> - F = fun() -> - sleep(), - line(), - io:format("=== CRL-TEST 1 ~n"), - line(), - {ok,S} = crl_open_bind("localhost", debug(f)), - Filter = eldap:equalityMatch("cn","Administrative CA"), - X=(catch eldap:search(S, [{base, "o=Post Danmark, c=DK"}, - {filter, Filter}, - {attributes,["certificateRevocationList"]}])), - dump_to_file("test-crl1.result",X), - ok - end, - go(F). - - -dump_to_file(Fname,{ok,Res}) -> - case Res#eldap_search_result.entries of - [Entry|_] -> - case Entry#eldap_entry.attributes of - [{Attribute,Value}|_] -> - file:write_file(Fname,list_to_binary(Value)), - io:format("Value of '~s' dumped to file: ~s~n", - [Attribute,Fname]); - Else -> - io:format("ERROR(dump_to_file): no attributes found~n",[]) - end; - Else -> - io:format("ERROR(dump_to_file): no entries found~n",[]) - end. 
- -switch(1) -> - %% - %% SEARCH - %% - F = fun() -> - sleep(), - line(), - io:format("=== SWITCH-TEST 1 (short-search)~n"), - line(), - {ok,S} = sw_open_bind("korp", debug(t)), - Filter = eldap:equalityMatch("cn","Administrative CA"), - X=(catch eldap:search(S, [{base, "o=Post Danmark, c=DK"}, - {filter, Filter}, - {attributes,["cn"]}])), - io:format("RESULT: ~p~n", [X]), - %%dump_to_file("test-switch-1.result",X), - eldap:close(S), - ok - end, - go(F); -switch(2) -> - %% - %% ADD AN ENTRY - %% - F = fun() -> - sleep(), - line(), - io:format("=== SWITCH-TEST 2 (add-entry)~n"), - line(), - {ok,S} = sw_open_bind("korp", debug(t)), - Entry = "cn=Bill Valentine, o=Post Danmark, c=DK", - X=(catch eldap:add(S, Entry, - [{"objectclass", ["person"]}, - {"cn", ["Bill Valentine"]}, - {"sn", ["Valentine"]} - ])), - io:format("~p~n",[X]), - eldap:close(S), - X - end, - go(F); -switch(3) -> - %% - %% SEARCH FOR THE NEWLEY ADDED ENTRY - %% - F = fun() -> - sleep(), - line(), - io:format("=== SWITCH-TEST 3 (search-added)~n"), - line(), - {ok,S} = sw_open_bind("korp", debug(t)), - Filter = eldap:equalityMatch("cn","Bill Valentine"), - X=(catch eldap:search(S, [{base, "o=Post Danmark, c=DK"}, - {filter, Filter}, - {attributes,["cn"]}])), - io:format("RESULT: ~p~n", [X]), - %%dump_to_file("test-switch-1.result",X), - eldap:close(S), - ok - end, - go(F); -switch(4) -> - %% - %% DELETE THE NEWLEY ADDED ENTRY - %% - F = fun() -> - sleep(), - line(), - io:format("=== SWITCH-TEST 4 (delete-added)~n"), - line(), - {ok,S} = sw_open_bind("korp", debug(t)), - Entry = "cn=Bill Valentine, o=Post Danmark, c=DK", - X=(catch eldap:delete(S, Entry)), - io:format("RESULT: ~p~n", [X]), - %%dump_to_file("test-switch-1.result",X), - eldap:close(S), - ok - end, - go(F). - - - -%%% --------------- -%%% Misc. functions -%%% --------------- - -sw_open_bind(Host) -> - sw_open_bind(Host, debug(t)). 
- -sw_open_bind(Host, Dbg) -> - sw_open_bind(Host, Dbg, "cn=Torbjorn Tornkvist,o=Post Danmark,c=DK", "qwe123"). - -sw_open_bind(Host, LogFun, RootDN, Passwd) -> - Opts = [{log,LogFun},{port,9779}], - {ok,Handle} = eldap:open([Host], Opts), - {eldap:simple_bind(Handle, RootDN, Passwd), - Handle}. - -crl_open_bind(Host) -> - crl_open_bind(Host, debug(t)). - -crl_open_bind(Host, Dbg) -> - do_open_bind(Host, Dbg, "o=Post Danmark, c=DK", "hejsan"). - -topen_bind(Host) -> - topen_bind(Host, debug(t)). - -topen_bind(Host, Dbg) -> - do_open_bind(Host, Dbg, "dc=bluetail, dc=com", "hejsan"). - -do_open_bind(Host, LogFun, RootDN, Passwd) -> - Opts = [{log,LogFun}], - {ok,Handle} = eldap:open([Host], Opts), - {eldap:simple_bind(Handle, RootDN, Passwd), - Handle}. - -debug(t) -> fun(L,S,A) -> io:format("--- " ++ S, A) end; -debug(1) -> fun(L,S,A) when L =< 1 -> io:format("--- " ++ S, A) end; -debug(2) -> fun(L,S,A) when L =< 2 -> io:format("--- " ++ S, A) end; -debug(f) -> false. - -sleep() -> msleep(400). -%sleep(Sec) -> msleep(Sec*1000). -msleep(T) -> receive after T -> true end. - -line() -> - S = "==============================================================\n", - io:format(S). - -heading([], Heading) -> io:format(Heading); -heading(Heading, _ ) -> io:format(Heading). - -%%% -%%% Process to run the test case -%%% -go(F) -> - Self = self(), - Pid = spawn(fun() -> run(F,Self) end), - receive {Pid, X} -> ok end. - -run(F, Pid) -> - Pid ! {self(),catch F()}. diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/ldap.rc b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/ldap.rc deleted file mode 100644 index 6cbdfea..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/ldap.rc +++ /dev/null @@ -1,103 +0,0 @@ -#!/bin/sh -# -# ldap This shell script takes care of starting and stopping -# ldap servers (slapd and slurpd). 
-# -# chkconfig: - 39 61 -# description: LDAP stands for Lightweight Directory Access Protocol, used \ -# for implementing the industry standard directory services. -# processname: slapd -# config: /etc/openldap/slapd.conf -# pidfile: /var/run/slapd.pid - -# Source function library. -. /etc/init.d/functions - -# Source networking configuration and check that networking is up. -if [ -r /etc/sysconfig/network ] ; then - . /etc/sysconfig/network - [ ${NETWORKING} = "no" ] && exit 0 -fi - - -slapd=/usr/sbin/slapd -slurpd=/usr/sbin/slurpd -[ -x ${slapd} ] || exit 0 -[ -x ${slurpd} ] || exit 0 - -RETVAL=0 - -function start() { - # Start daemons. - echo -n "Starting slapd:" - daemon ${slapd} - RETVAL=$? - echo - if [ $RETVAL -eq 0 ]; then - if grep -q "^replogfile" /etc/openldap/slapd.conf; then - echo -n "Starting slurpd:" - daemon ${slurpd} - RETVAL=$? - echo - fi - fi - [ $RETVAL -eq 0 ] && touch /var/lock/subsys/ldap - return $RETVAL -} - -function stop() { - # Stop daemons. - echo -n "Shutting down ldap: " - killproc ${slapd} - RETVAL=$? - if [ $RETVAL -eq 0 ]; then - if grep -q "^replogfile" /etc/openldap/slapd.conf; then - killproc ${slurpd} - RETVAL=$? - fi - fi - echo - [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/ldap /var/run/slapd.args - return $RETVAL -} - -# See how we were called. -case "$1" in - start) - start - ;; - stop) - stop - ;; - status) - status ${slapd} - if grep -q "^replogfile" /etc/openldap/slapd.conf ; then - status ${slurpd} - fi - ;; - restart) - stop - start - ;; - reload) - killall -HUP ${slapd} - RETVAL=$? - if [ $RETVAL -eq 0 ]; then - if grep -q "^replogfile" /etc/openldap/slapd.conf; then - killall -HUP ${slurpd} - RETVAL=$? 
- fi - fi - ;; - condrestart) - if [ -f /var/lock/subsys/ldap ] ; then - stop - start - fi - ;; - *) - echo "Usage: $0 start|stop|restart|status|condrestart}" - RETVAL=1 -esac - -exit $RETVAL diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/people.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/people.ldif deleted file mode 100644 index 20af5a0..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/people.ldif +++ /dev/null @@ -1,11 +0,0 @@ -dn: cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com -objectclass: person -cn: Torbjorn Tornkvist -sn: Tornkvist -telephoneNumber: 545 550 23 - -dn: cn=Magnus Froberg, ou=people, o=Bluetail AB, dc=bluetail, dc=com -objectclass: person -cn: Magnus Froberg -sn: Froberg -telephoneNumber: 545 550 26 diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/post_danmark.ldif b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/post_danmark.ldif deleted file mode 100644 index 24fbb3f..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/post_danmark.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: o=Post Danmark,c=DK -objectclass: country -objectclass: organization -c: DK -o: Post Danmark diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/server1.crl b/rabbitmq-server/plugins-src/eldap-wrapper/eldap-git/test/server1.crl deleted file mode 100644 index 6be714ad40275d7003f4c4cbbb8a08c642f062c7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 47075 zcmaJ~2bi8!l}^YcW@a)8DFjHJWCEm-e1HG{NhY0?{~OX9siaI`5&{~7h-FvMMOkHS zAR<`6)fE+YR}|~2xHj7E3d$;FDWXdg3!RW;-}Bvb?|*kZuOL28rZzenS8LN!y_2=~pbj;^nsf{hJ2%OD_BOTx-%iXuyW1K_gGtur4<$86kgY z8QFT)g@+Ga{N}^6F6=p+o0HV!e{CI^zvPljX7x1w&Fsai*R}UvxaXonZ=BV>`_lG9 zJ^N<0Prab`VDI5Kw_nh^Yxk^axzmyk_kSKYb zRdQt;)E$AUxx5Lw?@Jk|nosaI{@Vt9F$R_F;U2gV-;^Y|LM~S>=5x9KFR7oK_yfOg 
zfJ%jY$p$?Xfr`b733_-!2FfReTnURxB;DgcIZUgRVA=oQV){84m*CM!5vWSm>2VLL<;tZ}vCk>ci9hkqOj0o^U==Ef4SMw34upLv z7OP3YT!P%~gx} zLLZgVC3x<-3{=UiMuBY0bF~aqO`rwET3058aDy#cMf{R3+x{#Ul+# zxg@bI-Ai=`%IEXBq*Snt*z;dg5EQ*oAcpnDg$gRes@905yYD6eVK#X&7`Z-Y+1>?PJ=b9%O{pi(X`rrZX#$Dk4!?rjkW z%V|r>VwGHiuog>BHuwd<1Oh6NL-#@!)2XB~`MTFLW1oSl`F!6f+N;!!I`ML@nv@yn zQxT{__U?5A$YKujWgOo;v6-)D(V>D0#6-WYYMzJFha%Y9^7K-E&2*n;EE zw@3<$n6*SEo`pu3xPWTJ794xG4p+*R@P%aJ17lF%;6abdvF}D87WEu+9sy}-K8M3% zAK&9hyJdqBU*I&6l;r3y9SG}KES1?3%#1)#Dr@i3e)k4YrBJaGiL~FZ8lY04Z`7kp za6Ai;b2*7Fz#5o~nfBWsBvp&->yBs9p_5ds<;~-p_CHl6VJUIO?XT-UItJMjPy0_) zP#)nw4uZK({q@ix0x2T2C3E^4m4xJ`ShM#o?LS>X2(=5?WNWzk@y`Yj7PD^%=~w*o z5ePxDB`H1O`wk=~8u^s{btfFmK$tc1GQ`ogqyZizNfKFt0q;=|yk@Cpr%7pl4-!SF z*_25-;3}1bH7bGXFy}PDeH}gw8v}Q4gFfaY2_`{QXMyUktWywZoDv+K4LT3X_9&9(m~h7AUp6& z2kG&Z;YzS8=9~tO^^yQX3}J!>eOf^{Q6+4$2^zT5gYpR!!PdLMk|rT?KolnC&^A2- z#pOFtr9=&eq$D1Me8Zg6pydhzcL>&+eBGAtvh}<4-ZUEJaH8Sx5 zJq-{hZkwLL*F+$2H0E&49Ug>bBH>y(_!0#{SBiOi3DUubBM>t^&1VKs1rdg=2+fHD zVbQC|sBM4OtS2+eNvg!E4F0K?gmXd?({%773c>>55ZLFbMU)d0k^_fQiaqfbi3o{w1cIhumr~MJ$?^e$C8!mPHvg2i&i5chVpVc{TYsyd0xDb;+hEjRJfjS7-sOD`@YXIfJc##z!(y5YAO@fiKjr|Z+^Sm&liOsR5ZD%=2 zC{IcTWY4Khy}5uuu-T@k-!_#59j2hRi#!MmULhOY_JIt9ycCD7XeY~Qn=lV>2XcVn zTkNCKI#oduX#-|&%VQoS8%#WBo7)!n6t4ESGY72T6IP62#c|uLMaj z^?geOE}cw=wkQZinG#`*(xFWO1P;>XRnno(u*&YDieQt&=}>3sWVOW&n=4*F_Cp;G zy#+v-YFWi!a>u0>8=Tfcf0|;p;0lndt2!tAerFZq?Lp%uD3GUkhrNfRZ2sAUoOw;1~|pOj>HByrzj~PxE|Y%*N;m&DMtl)B3!kt!7sTm#DUVFmq1nFu#}rVvJwEcj$0nvfw)HISq485g7t9 zWzvU{4qFr?<%>l->Pd&C5vWW~!7!KNy5R^poDN$VNrK5RSG?8vU_^A#;DT*^(}B|S zfDML^Dj}1w7jxtp9)$B$A#uU*w+E2Ov(NbO4@IDY?KaY(F6Wf&9nlj3jrd=c1X7_? 
zCE?o08v>|``m7z$jGUq%bS;56vrj?mKSv<&V&;ms_~Aqnt8Kih0Zm>&q%k5L{xKa6 zI|qAU4>#&%1%Y{>`Q1ntu|fq9tFYXBw6g_ZnGtF;S2pT}Y&hv%6UL9s=}!@evHzo_ zZk|KF4M|8g>=hsNOa#Inn=3x*$p{3OXM#rmM*tyvCH&K<|BgV&H%v*bGa4XRG5b6X zk$SNEx=H~pn5LxBjo}ci*@efn%>@s#PBj=G({rXLyAaZW3M4=t?R+pmFyqW4v|XVS z2i?U|x3tX#4>)iDRf)eF?jrjP1lGuQ)NRh#I}ivxbDd7v>gOau7Gc^?*&Kn`It_PR z4~9b?jF840ZqVH-2_Ati!El%27Es6rEl9$(F)q|byB)$v=JH1#)Zv8aVDb1!sk+H| z5)kPYOovKMF#|!TSWI{7QwplVV1Q*cm*7;#&=e3E0z`c9$nGGiBHaqMBo{x4qzd9b zTcyTq@sdCYA!0KnjXB^zNJkR*bNf7v{(^#_Et27xl1BW&gD_T=gph6DQxG@|$zM%L zgPi9S^NduPFb`wyQb`htu@tx2afc{f;heCl+o*jiNxEy9%^dTl0D^%g8TD9Ib7|m2 z5@@gZ*d8wli7DyENr!$`K~MtLXp@dT96*TZ2zNO4`UnJ;#$54nqdW*|06NbG4UR!} z4^29*H3I!b`<~`^Z3Rp#A+?5`L?*orvN*e!=f)Mw>(%FY@ykATKRfts?FZE#gI_L`P zS4oGd=>Z7+%%qk-%zvGLVD9XR&kMuB(vi?;{G14cEjA^MeZqra3rHJLI!+A+{AR}Y z*pgHc5Z0msPZEi7E@&@E?2Ce$y}=V4nI|BmrR1ngNOj__8^=DrW7QnOLrEu~tq9}Q z2O}mx2NXFf!(Am1fkUQww#}U2N=^m20E6Jcb{i8vsuP#uI-{8`j3qs!9;BjBgKt4EBw*M#w5v$Vf^iJl4(iJ)5?JjR|pb9C?wL4>A;hRBH8N79Zo=uM4z}P29e<=pB+G;@`&~A za4AVq3X}<3f{s_5Bmq%vnQ}n@p%O^K|0%rzgrX`Tghu|o2Vn_nBrzTFbO1@}V!Mqg z?+GB`>TFxkzP|yIM9JLXDPIX7u>XXD>rmxHpAl$u60uI+sgk6V3UsYG++;^E7RC9% z{@4NXWN87Dh8+RHahQ@i-k~5UF!at|r}oPO2&SGG)^?X8z^r28n3Or(6c=O(2>0EH z5$kv`3zv4fS^z$ojkqlhC@Urj3yaws)Ep9&MwEMXXxG`vIjd(RBDWdAsL8VuuRW* ze;5ws9i|9-0|?zDDLL{k5(xj=EDF~S>%Fp&0pZc%|VOLmVX-8K8VOnhBzfuq`p~7$2 z5yrF;0fbv+c7U8t8yi4)oq#l!j#9TFSsCaM`|D2f8;lD~1x8i2tC|a)i>Sw*_|(lh zC$#Gz8nsWsY3f@bNm>VNQ2UcX5~@jrj-U2l9t0AD+>uD9X)ubc3gzWtqdxaFhwEHj=-SlK6}tsU)ysNri0i3|CWxBp5`r?3=@#cA82;WB_fk?bF2V z0fa1xP5jXagw|4XxS1;g2<^9ITc(c*AlwcnQO`^lt3a7>##v2qhGVre5W+E=6PxKG zdw{?eFyAusKXp#h=2#|+IqMV;Lg2^F)3m>HAXJResX!dite*#v(7yH=pZ1c15EMXz zZL2ctX8{EIlo+wuuCE8tg)r?T>X|*aAqjN6eSBxnZ-8(i&OUU5KOR7GlaClNS8tJs zNB*`YO<$mr;Hptau#fMIW(A=e3IDOzX?AY_iMz8+&#do9AdqL~8K3pT7-Z)y^{0Q+ z0D*t9wdKt70tks4X-i97-h%E-$uo|9MVRHHXq1WM6p%yL;hbUyLTjygRO)|R7KQ^sLx{8F zN(G5OK<}_Q@iRWz08wgbrq^I0Vi~JF^VJ{;q$gRY#E~!3p^G9tQv?^#WFXM?<`Sec 
zo(;pvJ43e3Jk!PFP7+*)xdf9w7$nKvNqe1g2R%rF2bN2zVMW4)z@8}Q(5XQZm;jN5Yc<#+X z5>|kmf@#A72v_t7$C}*J03klIZD#I>2O)lf@v-OR7#a|#;-!eqoVvnE5|ETrOi44Y zjX>mzbUNdg4G>Ci_8HGPl1k!XX?oW2u*hcG~2Zii6k@*5F&PVA#e6S^b zEr4)?i+J_8reG#36P5~>co+9gJ!n1rSMz?st{ve zbW|nI5)5lX!rG+6NdgFGj+p3@x(!LY(XwNI-5FO0NqF6goPyJDi9y7hpZ+fagwU5n zhiACboO~TXBncd%;XgpqlV$H+X;T;u3=Nr6Ns~ZGLa0rQ{h5bUk`xkH=S%U60VEeG zY*$u(Qvl&=EYaW@dQ@On(QigfbfQ!MKzNvd9J;d0&C9}yBEql_-HgN8oCL(l;Yulh zPz)tx-kC0F7fFcd$jK~UuaXdg(DyKAIungbx6GT6d4(4mlE4?(ryz0KB9i0=h;775 z1A-)JFt$OJ-5!KZVVPptk;51cAo9{(T6T)yKgi;Ff7Tac){NBkeCv37QsqmUg5)c?o6I6O@03kyl9UU|N9D&MYgA0!C zDoe)_O_r;U#u1QQ$*>indbb}AAhfoa6R+M8f!L+=>gN=M)1kFs7jFp5Ha>uN1r+nj-ENEs$>5ZL>eQi#b{29o|ydc0IH!_X0Lc{d<0^3 zgkdh~!JMS(Mp{(!y-pHNC5|0&ujOwA5WFy<q# zJSWr%r4+Otn%h$AiX^eV!_K^d#EOGrG$nPa9nVZXmE=f;vb2~;qoXOQ5{3f^r3({O zepu%O`wV)*&J^2S_>ab7biOd&H-9`xk{%kePO6+D2`g^Tsq)SsN&3F+bvpH24}!tO zMK<&6X0}Bj{vRWA@jb?&l=+Zp4u0aHnq%lB7M+-n-6c zB2a|{?bY)<2%W6#Mt1qk0Fss+Tg}VU8z5>9NBygUklEk{kG%wQ9O)_TNa%|u0Zr#` zgCw-Nl3vK#M?DDYf;;}^VwSxmLR3oPe0&?_=~j73LKu($dG1vas6--+mR~4Hw2p-#6UPP+ zq7_0g&W%8rlpSHr{f{6CXMlwNbG?bi%_OuG+8R8k*-JuR%4!>3CpwT=OpsZ&!RS(h zk%7oNerZ=rB&k4J2j(7&Kp^VOqtf}00fam7B;gwBhz`j#kOGn==u!;^2qv1;Ub|E| zrS%7?o4uG_lXOmScz7qvR;eyW#G-00AR;NhC`bYWM?jqi8z9=a)v1;)YwL%pvlr7* z-!dmOSdc^4xhoou(SoO5sUTrSSqRxRCx9eTw9j~#<9bkX1doNiF(=+NMJ2&zqPf(T zG}#sT$|#f}3Ly??-i1LDSQV1u)^2KmPzkgRYh{!N!So{|M{ zg7A1Vt!>QLL_&H)a1v~lntUKgLcczlQ`MUuskg8+X|8(C?1BJ$24Iec{{TT0p}spE z{{#XDCzB=c^FO2$7Z5wXoeKj9RZbGF&2#+)G8}kOW-x9HlF+$E^lrgK55jtrpNL3n z-eAaW1a?Paf${}lLSR+;>RnUz8zAWdzjxW&f<%LJKh!y)5ipPF*F2dE4ttPv))Mzx z{Xzs{lLLcg??w2hl8fmCgcdNOEsGpKj`mHdl9147(Rlqj0ipfJocJPVRh%U9n=Ijev4l3<@0snB_k1IY~nvC;O|{V4*mmYw;! 
zn2-eOg=G12-sUACKFl#~ak)8&cnKoO-pz50p98T3evu1}1eDPs_SfAU<^&K~@kNef zRY}AnEF7eg!@x(LK7 z@bm9hkfeav8FMigemj8hn?PiP7Y>L(Rg#-8`a}Tn?6u4BPY8_uO0l8!xR-=Dll+`j zI{#1v0IlS|TB=Vd2obB$dFF6S zPYfU|DB*gRwlzSsGcP$ifDmaCm0CO`0+q?3TRb~}Q0OJ=bk+l^{z%>ayP2KyjR;IU)%5n|=1yo=)#bqOP z;;_E-w(1H^ld@YWY%!O=(f^?a8H%W$~dPKu!` zTziaw5NH!~m^xd4x2=TOC(1eh2RbLYi_6~CP94z!^MwNrS8blBZs({4B&@c5R8~yW z;b5ZCt6|5PE8ZJGc>Re`mG!G55UzTeb6Vkoc9|12n4HY?e}g2b0ijRpPVYn#UXmiQ zO1+|z5ZfmeVl$TyY=DrJ*xJ&)K7jB`pCm5mUK2ogR)U1J-TMMa!dkL-Zx0~c2qfWu z>Wr9diKK3}^)m ztCWCPta5gklYofn>2?vT8?HxIOZ~!s#W%0uS1p7-EnkDaWAbEHK zIi5YIW%o8fI4bs>mi;OM;b-a0i7#{HutbKLKQ}?0%OXjPMDNZ;Ao9E2>58v=P*yIq z4aV7eGKGQzgKVFI?)L;qV6};{U)dRf&}nae-AZ+e!mhyw+bh2ET`CFr4?KcB@sVQ# z2-OYJxV6gBg2<=w+Acept6XivftY<-u|S7|LZM1xhaoE+6(BZ6T5#>fT=`iq32{C7 zdD(Q;>k7)68AzkeDwpD-Duv=YbFZsc1xYpBU9lbY>h1^x>tpWS>Jn+Rz!Ct&?sKkwpAIL7m%U%G#Kq&_{b4THC79Lf zTLgrLB-@p(zT6K7&zX=obGfTa$>j$adt#r~Xsm(+9u0*ge(Kg!01$XlvcZ#XcXP@> zxSeEf%W92%GY}XN6SU@&K@##{5^AqeM-55x3%mAkOD>Hhq1V$KuG=LyNJ^o=ED~F- z$|)cu9QJUlF7d+&h{P(Ze;I;F0E^rGD=+5}izZ=>%<1{nlu zpBS+*0AhqtdX|Eu#{_|- zwFK#Uf9Mcui{_c2nHtv0&nJV>Gq)wJtoFmf_%Q0bTYWGBb~sn!?>5YKl7th3y|7b~ z4cB-Oj6M4uuBAUyP{taONNlz8PtZ9y4WbC^3z;NoNkjfaB&~PFs|*BtK~~)PULwG1$spm{icfh-I0bNd z=EPS!ZGj|!*iV$MefdN-*}~$1Q2`FXX8T(k^q^#C${mC2*j?BZhSHVF@kZU<9!ihO6iL9iEVtQ zAqfRq+vRU^R3#)Kq`~ixnj5@oqn9Lc4861Jwvs^<)#UK*?Z}OH-%Gl)h%F?Ba zf+RRtl5>v!xd#c=PNKunUW3u`fJQ)DgGW2xf^8Afh#1z&ps~Sfxga4V#83NDgM647d5$0R%aO6I9@6siX z!*C#Wd*>XrPa=s@OSAUqoCL(|_yRrSkd)PN%{iSjJV-)ygRIl$k2FBim1RoWJS%{t zZf@sN=Q!RMO8~|oPol%k>dFvc3A1mXr~1n}9HKB73>&oODG$P#V~<3weSZW3-C=&+ zmU#`394QmD`E3f4LQ$1a0r|Ho2-F$cRqUuI?}!d$2mrCm`)mDT3W!}=So^opaI6b% z<9z`nM$BH!Exi$_OeVhBC0sIbr~yg1wp<(}p#(yLtSyc&5J~cqpKUOfek_v2h>p4j zS(zmE9>&%oUJ{H7BZNw86oe!Yw!k#eX-Qofri~l4#FfqWmJZ*B*JEr+TW@v4fy@IP zPs0DLE(t{42B{6}t=;-vm4vg4FvJeBw)`Z3P~{}dmLZ>IO1?RlcgP`UW60{Fd zML-yapI;?0-8R1jh}Q6{P`2K!`GXE86-B1OoBIb43=1*#g>D6b=s`eZE5bGnXtJAE zuq&o5nE0h2Nz9ln>74gPAl9+JU2TChB!P~$Th%-d 
zmk85iuXwjQpbUf?Q|1zEf6NaDN%)a16VzQ)5K07aE%w)KyfA?9>n&t4w>ZiK|5_Xw z1Np@i5bIaj{<$z5YPiG!ZA(1}2be{Nn;jd1msRj}?AeI zHy3l8|2mO`q|yX!zey*K7F_x~^?D7pF`WDsfxRu;U5y%F2f6^OX%2VJem@*&ZDv?^ zd_Y0s%Gk>qJ2bKfEhw~!{dGIClmsV9o|V8QL~~ACGiu3!V94z=zB!9n6~w~Yo%hPb z1tbbZ0>9363KCPtZideOmk7jO;@mMg0m>k$aoK26ZhEW&WZ35RTg z>RH5^f#hIelar#al#?2Kn1UQODOMHm7SqJyl{xmjWg z5VAyEII;Q6bF(Zz1A+P_%DK}ICpV3m1KK)QmjHJ;aeVD1*#0385?Y0so~t5IkvO1L znJdG@u_d_FYj5ywzrpg`ql_=uk%dMg2_S5Zxh*?&F_8xgmt`;J`Ycuf1jT-d4^2sX zbc3@Lw@m!qt|4v-gwMs2+lOxNm;gea$cDSqgJ4wfi%#a8wq=1ICN3JqE_!u76eJ;l zA{o%$g$lyWbtws(!|ljeZ43t@7J;Anb@kt!uaX3Wrb0_n{dZFvAk-4fh_(L2wGoH~ zS-Z37Fxy^v$7EbjaB#06s%$midwTj6 z^JRNmgCxB8LN<8!4FQA~^9aq{^YsP@nogGBz6ituntAFuF>$=|K<2b2i+ZxJ!_PpH zTG_U@>u^%%WaP^nzfM`jl;l!9ohk{*B#3nT>vsHO1Y#`~d(I0WFk<$}O!sB^9llOp zq+-5h*EA;yRvd8wS*LT?2ax;>ig{G(5B?|ukz#25!JkGTL|NAHtv~$k2*d)KeY!2E zz{Av$mco5%4l@u7XnL~p4vZlt4I6FSr=BdGR1mwN)^oW|TqNNwDeLRf9WN+IYKrVq z)o$H8!BW1luO)q!%L)OOr;oL?BkVX}%)@u|E4{n5hZU1PF=Bdp{nvq^ zK?9$yoZGL{;Q)eq+GqTLb7dG#*i%B8G<{hm313hli`i2NApE?n-7T0l-5i1N^Ka&2 z4u7rzk}TH*4RfXkm$8v6qDj*PP5hro5_@-T{|NyEE`$uX-^IT2Ye+&3+SWI@-Alq^ zvLfI9*$P5fgkM3l58VM5KcQrSE?su#ruWJq3BSxpPQgBB3!EhOql>33jU=%rv)644 zAP@n>ZJg&~IvEbX+d--%`%jM~u^%L$J_;U#dob`2**b9>fyxyVF2IU5O38uT~P$Xfc2*G&Z zc$gC|oDdFU)N2t4OozGR9ge4y;i_;OwjH0~95q0Hr5JjFw{!wxzsz{x!#XE<3y%G6 z_XXegAQ_HC_6Hp~jFcOn#-0J#-w`B1&q;x{=_d`4P}b%;?cJats1tZP+imQ1;To=; z0mK~j-nEe=*0prO+aeI`uQ{h3*F+%3KV2}h0fINTzizOL4l!|9I(Eh9;Mrc1w9^s8 zI>T=;B*}>~C%(;ZFhC;91T|mX7*6HY*f?VFl-j-bDM1_qw8t zfNHpmYAfdfHHR4pbuM#G2Y%`%E+7OW_KFYudIVzcj2^hxgEA(-{<_^?i$JWa@}MKp z@pai6*h_HgTUCD`E?g&l|U?}fDo;ipo{tiNmwP4Z5;fH2f_RN4X!SZ%ii2A3B;&Nk+eG)E`gYJ#s_Wt;bvX9XPW3J=Z?}``!OFCTSbg zJpGF0SG?TTzkk!fegiJMtlzfkp?TMAU--p-8`>w%d;1I3Bd^?f5tY=JMoh<$4|NAE5F$D;)%;o+x3aLe{A{q^{)*$Htv~A zZ@lgNxwl@@_SEU`|K;6n_YeK-BkObD+jQ%TS3LLX%hSH|vwxU)bL$y*ZM}2wFOF>Z W@y$ - gen_tcp:connect(Host, Data#eldap.port, Opts, Data#eldap.timeout); - do_connect(Host, Data, Opts) when Data#eldap.use_tls == true -> -- Vsn = 
erlang:system_info(version), -- if Vsn >= "5.3" -> -- %% In R9C, but not in R9B -- {_,_,X} = erlang:now(), -- ssl:seed("bkrlnateqqo" ++ integer_to_list(X)); -- true -> true -- end, - ssl:connect(Host, Data#eldap.port, [{verify,0}|Opts]). - - diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/hash.mk b/rabbitmq-server/plugins-src/eldap-wrapper/hash.mk deleted file mode 100644 index 262b7cc..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/hash.mk +++ /dev/null @@ -1 +0,0 @@ -UPSTREAM_SHORT_HASH:=e309de4 diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/license_info b/rabbitmq-server/plugins-src/eldap-wrapper/license_info deleted file mode 100644 index 0a0e13c..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/license_info +++ /dev/null @@ -1,3 +0,0 @@ -Eldap is "Copyright (c) 2010, Torbjorn Tornkvist" and is covered by -the MIT license. It was downloaded from https://github.com/etnt/eldap - diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/package.mk b/rabbitmq-server/plugins-src/eldap-wrapper/package.mk deleted file mode 100644 index 02c8b4e..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/package.mk +++ /dev/null @@ -1,30 +0,0 @@ -APP_NAME:=eldap - -UPSTREAM_GIT:=https://github.com/rabbitmq/eldap.git -UPSTREAM_REVISION:=e309de4db4b78d67d623 -WRAPPER_PATCHES:=eldap-appify.patch remove-eldap-fsm.patch eldap-no-ssl-seed.patch remove-ietf-doc.patch - -ORIGINAL_APP_FILE:=$(CLONE_DIR)/ebin/$(APP_NAME).app -DO_NOT_GENERATE_APP_FILE=true - -GENERATED_DIR:=$(CLONE_DIR)/generated -PACKAGE_ERLC_OPTS+=-I $(GENERATED_DIR) -INCLUDE_HRLS+=$(GENERATED_DIR)/ELDAPv3.hrl -EBIN_BEAMS+=$(GENERATED_DIR)/ELDAPv3.beam - -define package_rules - -$(CLONE_DIR)/src/ELDAPv3.asn: $(CLONE_DIR)/.done - -$(GENERATED_DIR)/ELDAPv3.hrl $(GENERATED_DIR)/ELDAPv3.beam: $(CLONE_DIR)/src/ELDAPv3.asn - @mkdir -p $(GENERATED_DIR) - $(ERLC) $(PACKAGE_ERLC_OPTS) -o $(GENERATED_DIR) $$< - -$(PACKAGE_DIR)+clean:: - rm -rf $(GENERATED_DIR) $(EBIN_DIR) - -# This rule is run 
*before* the one in do_package.mk -$(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done:: - cp $(CLONE_DIR)/LICENSE $(PACKAGE_DIR)/LICENSE-MIT-eldap - -endef diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/remove-eldap-fsm.patch b/rabbitmq-server/plugins-src/eldap-wrapper/remove-eldap-fsm.patch deleted file mode 100644 index f6b05f6..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/remove-eldap-fsm.patch +++ /dev/null @@ -1,952 +0,0 @@ -diff --git a/src/eldap_fsm.erl b/src/eldap_fsm.erl -deleted file mode 100644 -index 381ce69..0000000 ---- a/src/eldap_fsm.erl -+++ /dev/null -@@ -1,946 +0,0 @@ ---module(eldap_fsm). --%%% -------------------------------------------------------------------- --%%% Created: 12 Oct 2000 by Tobbe --%%% Function: Erlang client LDAP implementation according RFC 2251. --%%% The interface is based on RFC 1823, and --%%% draft-ietf-asid-ldap-c-api-00.txt --%%% --%%% Copyright (C) 2000 Torbjörn Törnkvist --%%% Copyright (c) 2010 Torbjorn Tornkvist --%%% See MIT-LICENSE at the top dir for licensing information. --%%% --%%% Modified by Sean Hinde 7th Dec 2000 --%%% Turned into gen_fsm, made non-blocking, added timers etc to support this. --%%% Now has the concept of a name (string() or atom()) per instance which allows --%%% multiple users to call by name if so desired. --%%% --%%% Can be configured with start_link parameters or use a config file to get --%%% host to connect to, dn, password, log function etc. --%%% -------------------------------------------------------------------- -- -- --%%%---------------------------------------------------------------------- --%%% LDAP Client state machine. --%%% Possible states are: --%%% connecting - actually disconnected, but retrying periodically --%%% wait_bind_response - connected and sent bind request --%%% active - bound to LDAP Server and ready to handle commands --%%%---------------------------------------------------------------------- -- --%%-compile(export_all). 
--%%-export([Function/Arity, ...]). -- ---behaviour(gen_fsm). -- --%% External exports ---export([start_link/1, start_link/5, start_link/6]). -- ---export([baseObject/0,singleLevel/0,wholeSubtree/0,close/1, -- equalityMatch/2,greaterOrEqual/2,lessOrEqual/2, -- approxMatch/2,search/2,substrings/2,present/1, -- 'and'/1,'or'/1,'not'/1,modify/3, mod_add/2, mod_delete/2, -- mod_replace/2, add/3, delete/2, modify_dn/5]). ---export([debug_level/2, get_status/1]). -- --%% gen_fsm callbacks ---export([init/1, connecting/2, -- connecting/3, wait_bind_response/3, active/3, handle_event/3, -- handle_sync_event/4, handle_info/3, terminate/3, code_change/4]). -- -- ---import(lists,[concat/1]). -- ---include("ELDAPv3.hrl"). ---include("eldap.hrl"). -- ---define(LDAP_VERSION, 3). ---define(RETRY_TIMEOUT, 5000). ---define(BIND_TIMEOUT, 10000). ---define(CMD_TIMEOUT, 5000). ---define(MAX_TRANSACTION_ID, 65535). ---define(MIN_TRANSACTION_ID, 0). -- ---record(eldap, {version = ?LDAP_VERSION, -- hosts, % Possible hosts running LDAP servers -- host = null, % Connected Host LDAP server -- port = 389 , % The LDAP server port -- fd = null, % Socket filedescriptor. -- rootdn = "", % Name of the entry to bind as -- passwd, % Password for (above) entry -- id = 0, % LDAP Request ID -- log, % User provided log function -- bind_timer, % Ref to bind timeout -- dict, % dict holding operation params and results -- debug_level % Integer debug/logging level -- }). -- --%%%---------------------------------------------------------------------- --%%% API --%%%---------------------------------------------------------------------- --start_link(Name) -> -- Reg_name = list_to_atom("eldap_" ++ Name), -- gen_fsm:start_link({local, Reg_name}, ?MODULE, [], []). 
-- --start_link(Name, Hosts, Port, Rootdn, Passwd) -> -- Log = fun(N, Fmt, Args) -> io:format("---- " ++ Fmt, [Args]) end, -- Reg_name = list_to_atom("eldap_" ++ Name), -- gen_fsm:start_link({local, Reg_name}, ?MODULE, {Hosts, Port, Rootdn, Passwd, Log}, []). -- --start_link(Name, Hosts, Port, Rootdn, Passwd, Log) -> -- Reg_name = list_to_atom("eldap_" ++ Name), -- gen_fsm:start_link({local, Reg_name}, ?MODULE, {Hosts, Port, Rootdn, Passwd, Log}, []). -- --%%% -------------------------------------------------------------------- --%%% Set Debug Level. 0 - none, 1 - errors, 2 - ldap events --%%% -------------------------------------------------------------------- --debug_level(Handle, N) when integer(N) -> -- Handle1 = get_handle(Handle), -- gen_fsm:sync_send_all_state_event(Handle1, {debug_level,N}). -- --%%% -------------------------------------------------------------------- --%%% Get status of connection. --%%% -------------------------------------------------------------------- --get_status(Handle) -> -- Handle1 = get_handle(Handle), -- gen_fsm:sync_send_all_state_event(Handle1, get_status). -- --%%% -------------------------------------------------------------------- --%%% Shutdown connection (and process) asynchronous. --%%% -------------------------------------------------------------------- --close(Handle) -> -- Handle1 = get_handle(Handle), -- gen_fsm:send_all_state_event(Handle1, close). -- --%%% -------------------------------------------------------------------- --%%% Add an entry. The entry field MUST NOT exist for the AddRequest --%%% to succeed. The parent of the entry MUST exist. 
--%%% Example: --%%% --%%% add(Handle, --%%% "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com", --%%% [{"objectclass", ["person"]}, --%%% {"cn", ["Bill Valentine"]}, --%%% {"sn", ["Valentine"]}, --%%% {"telephoneNumber", ["545 555 00"]}] --%%% ) --%%% -------------------------------------------------------------------- --add(Handle, Entry, Attributes) when list(Entry),list(Attributes) -> -- Handle1 = get_handle(Handle), -- gen_fsm:sync_send_event(Handle1, {add, Entry, add_attrs(Attributes)}). -- --%%% Do sanity check ! --add_attrs(Attrs) -> -- F = fun({Type,Vals}) when list(Type),list(Vals) -> -- %% Confused ? Me too... :-/ -- {'AddRequest_attributes',Type, Vals} -- end, -- case catch lists:map(F, Attrs) of -- {'EXIT', _} -> throw({error, attribute_values}); -- Else -> Else -- end. -- -- --%%% -------------------------------------------------------------------- --%%% Delete an entry. The entry consists of the DN of --%%% the entry to be deleted. --%%% Example: --%%% --%%% delete(Handle, --%%% "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com" --%%% ) --%%% -------------------------------------------------------------------- --delete(Handle, Entry) when list(Entry) -> -- Handle1 = get_handle(Handle), -- gen_fsm:sync_send_event(Handle1, {delete, Entry}). -- --%%% -------------------------------------------------------------------- --%%% Modify an entry. Given an entry a number of modification --%%% operations can be performed as one atomic operation. --%%% Example: --%%% --%%% modify(Handle, --%%% "cn=Torbjorn Tornkvist, ou=people, o=Bluetail AB, dc=bluetail, dc=com", --%%% [replace("telephoneNumber", ["555 555 00"]), --%%% add("description", ["LDAP hacker"])] --%%% ) --%%% -------------------------------------------------------------------- --modify(Handle, Object, Mods) when list(Object), list(Mods) -> -- Handle1 = get_handle(Handle), -- gen_fsm:sync_send_event(Handle1, {modify, Object, Mods}). 
-- --%%% --%%% Modification operations. --%%% Example: --%%% replace("telephoneNumber", ["555 555 00"]) --%%% --mod_add(Type, Values) when list(Type), list(Values) -> m(add, Type, Values). --mod_delete(Type, Values) when list(Type), list(Values) -> m(delete, Type, Values). --mod_replace(Type, Values) when list(Type), list(Values) -> m(replace, Type, Values). -- --m(Operation, Type, Values) -> -- #'ModifyRequest_modification_SEQOF'{ -- operation = Operation, -- modification = #'AttributeTypeAndValues'{ -- type = Type, -- vals = Values}}. -- --%%% -------------------------------------------------------------------- --%%% Modify an entry. Given an entry a number of modification --%%% operations can be performed as one atomic operation. --%%% Example: --%%% --%%% modify_dn(Handle, --%%% "cn=Bill Valentine, ou=people, o=Bluetail AB, dc=bluetail, dc=com", --%%% "cn=Ben Emerson", --%%% true, --%%% "" --%%% ) --%%% -------------------------------------------------------------------- --modify_dn(Handle, Entry, NewRDN, DelOldRDN, NewSup) -- when list(Entry),list(NewRDN),atom(DelOldRDN),list(NewSup) -> -- Handle1 = get_handle(Handle), -- gen_fsm:sync_send_event(Handle1, {modify_dn, Entry, NewRDN, bool_p(DelOldRDN), optional(NewSup)}). -- --%%% Sanity checks ! -- --bool_p(Bool) when Bool==true;Bool==false -> Bool. -- --optional([]) -> asn1_NOVALUE; --optional(Value) -> Value. -- --%%% -------------------------------------------------------------------- --%%% Synchronous search of the Directory returning a --%%% requested set of attributes. 
--%%% --%%% Example: --%%% --%%% Filter = eldap:substrings("sn", [{any,"o"}]), --%%% eldap:search(S, [{base, "dc=bluetail, dc=com"}, --%%% {filter, Filter}, --%%% {attributes,["cn"]}])), --%%% --%%% Returned result: {ok, #eldap_search_result{}} --%%% --%%% Example: --%%% --%%% {ok,{eldap_search_result, --%%% [{eldap_entry, --%%% "cn=Magnus Froberg, dc=bluetail, dc=com", --%%% [{"cn",["Magnus Froberg"]}]}, --%%% {eldap_entry, --%%% "cn=Torbjorn Tornkvist, dc=bluetail, dc=com", --%%% [{"cn",["Torbjorn Tornkvist"]}]}], --%%% []}} --%%% --%%% -------------------------------------------------------------------- --search(Handle, A) when record(A, eldap_search) -> -- call_search(Handle, A); --search(Handle, L) when list(Handle), list(L) -> -- case catch parse_search_args(L) of -- {error, Emsg} -> {error, Emsg}; -- {'EXIT', Emsg} -> {error, Emsg}; -- A when record(A, eldap_search) -> call_search(Handle, A) -- end. -- --call_search(Handle, A) -> -- Handle1 = get_handle(Handle), -- gen_fsm:sync_send_event(Handle1, {search, A}). -- --parse_search_args(Args) -> -- parse_search_args(Args, #eldap_search{scope = wholeSubtree}). -- --parse_search_args([{base, Base}|T],A) -> -- parse_search_args(T,A#eldap_search{base = Base}); --parse_search_args([{filter, Filter}|T],A) -> -- parse_search_args(T,A#eldap_search{filter = Filter}); --parse_search_args([{scope, Scope}|T],A) -> -- parse_search_args(T,A#eldap_search{scope = Scope}); --parse_search_args([{attributes, Attrs}|T],A) -> -- parse_search_args(T,A#eldap_search{attributes = Attrs}); --parse_search_args([{types_only, TypesOnly}|T],A) -> -- parse_search_args(T,A#eldap_search{types_only = TypesOnly}); --parse_search_args([{timeout, Timeout}|T],A) when integer(Timeout) -> -- parse_search_args(T,A#eldap_search{timeout = Timeout}); --parse_search_args([H|T],A) -> -- throw({error,{unknown_arg, H}}); --parse_search_args([],A) -> -- A. -- --%%% --%%% The Scope parameter --%%% --baseObject() -> baseObject. --singleLevel() -> singleLevel. 
--wholeSubtree() -> wholeSubtree. -- --%%% --%%% Boolean filter operations --%%% --'and'(ListOfFilters) when list(ListOfFilters) -> {'and',ListOfFilters}. --'or'(ListOfFilters) when list(ListOfFilters) -> {'or', ListOfFilters}. --'not'(Filter) when tuple(Filter) -> {'not',Filter}. -- --%%% --%%% The following Filter parameters consist of an attribute --%%% and an attribute value. Example: F("uid","tobbe") --%%% --equalityMatch(Desc, Value) -> {equalityMatch, av_assert(Desc, Value)}. --greaterOrEqual(Desc, Value) -> {greaterOrEqual, av_assert(Desc, Value)}. --lessOrEqual(Desc, Value) -> {lessOrEqual, av_assert(Desc, Value)}. --approxMatch(Desc, Value) -> {approxMatch, av_assert(Desc, Value)}. -- --av_assert(Desc, Value) -> -- #'AttributeValueAssertion'{attributeDesc = Desc, -- assertionValue = Value}. -- --%%% --%%% Filter to check for the presence of an attribute --%%% --present(Attribute) when list(Attribute) -> -- {present, Attribute}. -- -- --%%% --%%% A substring filter seem to be based on a pattern: --%%% --%%% InitValue*AnyValue*FinalValue --%%% --%%% where all three parts seem to be optional (at least when --%%% talking with an OpenLDAP server). Thus, the arguments --%%% to substrings/2 looks like this: --%%% --%%% Type ::= string( ) --%%% SubStr ::= listof( {initial,Value} | {any,Value}, {final,Value}) --%%% --%%% Example: substrings("sn",[{initial,"To"},{any,"kv"},{final,"st"}]) --%%% will match entries containing: 'sn: Tornkvist' --%%% --substrings(Type, SubStr) when list(Type), list(SubStr) -> -- Ss = {'SubstringFilter_substrings',v_substr(SubStr)}, -- {substrings,#'SubstringFilter'{type = Type, -- substrings = Ss}}. -- -- --get_handle(Pid) when pid(Pid) -> Pid; --get_handle(Atom) when atom(Atom) -> Atom; --get_handle(Name) when list(Name) -> list_to_atom("eldap_" ++ Name). 
--%%%---------------------------------------------------------------------- --%%% Callback functions from gen_fsm --%%%---------------------------------------------------------------------- -- --%%---------------------------------------------------------------------- --%% Func: init/1 --%% Returns: {ok, StateName, StateData} | --%% {ok, StateName, StateData, Timeout} | --%% ignore | --%% {stop, StopReason} --%% I use the trick of setting a timeout of 0 to pass control into the --%% process. --%%---------------------------------------------------------------------- --init([]) -> -- case get_config() of -- {ok, Hosts, Rootdn, Passwd, Log} -> -- init({Hosts, Rootdn, Passwd, Log}); -- {error, Reason} -> -- {stop, Reason} -- end; --init({Hosts, Port, Rootdn, Passwd, Log}) -> -- {ok, connecting, #eldap{hosts = Hosts, -- port = Port, -- rootdn = Rootdn, -- passwd = Passwd, -- id = 0, -- log = Log, -- dict = dict:new(), -- debug_level = 0}, 0}. -- --%%---------------------------------------------------------------------- --%% Func: StateName/2 --%% Called when gen_fsm:send_event/2,3 is invoked (async) --%% Returns: {next_state, NextStateName, NextStateData} | --%% {next_state, NextStateName, NextStateData, Timeout} | --%% {stop, Reason, NewStateData} --%%---------------------------------------------------------------------- --connecting(timeout, S) -> -- {ok, NextState, NewS} = connect_bind(S), -- {next_state, NextState, NewS}. -- --%%---------------------------------------------------------------------- --%% Func: StateName/3 --%% Called when gen_fsm:sync_send_event/2,3 is invoked. 
--%% Returns: {next_state, NextStateName, NextStateData} | --%% {next_state, NextStateName, NextStateData, Timeout} | --%% {reply, Reply, NextStateName, NextStateData} | --%% {reply, Reply, NextStateName, NextStateData, Timeout} | --%% {stop, Reason, NewStateData} | --%% {stop, Reason, Reply, NewStateData} --%%---------------------------------------------------------------------- --connecting(Event, From, S) -> -- Reply = {error, connecting}, -- {reply, Reply, connecting, S}. -- --wait_bind_response(Event, From, S) -> -- Reply = {error, wait_bind_response}, -- {reply, Reply, wait_bind_response, S}. -- --active(Event, From, S) -> -- case catch send_command(Event, From, S) of -- {ok, NewS} -> -- {next_state, active, NewS}; -- {error, Reason} -> -- {reply, {error, Reason}, active, S}; -- {'EXIT', Reason} -> -- {reply, {error, Reason}, active, S} -- end. -- --%%---------------------------------------------------------------------- --%% Func: handle_event/3 --%% Called when gen_fsm:send_all_state_event/2 is invoked. --%% Returns: {next_state, NextStateName, NextStateData} | --%% {next_state, NextStateName, NextStateData, Timeout} | --%% {stop, Reason, NewStateData} --%%---------------------------------------------------------------------- --handle_event(close, StateName, S) -> -- gen_tcp:close(S#eldap.fd), -- {stop, closed, S}; -- --handle_event(Event, StateName, S) -> -- {next_state, StateName, S}. 
-- --%%---------------------------------------------------------------------- --%% Func: handle_sync_event/4 --%% Called when gen_fsm:sync_send_all_state_event/2,3 is invoked --%% Returns: {next_state, NextStateName, NextStateData} | --%% {next_state, NextStateName, NextStateData, Timeout} | --%% {reply, Reply, NextStateName, NextStateData} | --%% {reply, Reply, NextStateName, NextStateData, Timeout} | --%% {stop, Reason, NewStateData} | --%% {stop, Reason, Reply, NewStateData} --%%---------------------------------------------------------------------- --handle_sync_event({debug_level, N}, From, StateName, S) -> -- {reply, ok, StateName, S#eldap{debug_level = N}}; -- --handle_sync_event(Event, From, StateName, S) -> -- {reply, {StateName, S}, StateName, S}; -- --handle_sync_event(Event, From, StateName, S) -> -- Reply = ok, -- {reply, Reply, StateName, S}. -- --%%---------------------------------------------------------------------- --%% Func: handle_info/3 --%% Returns: {next_state, NextStateName, NextStateData} | --%% {next_state, NextStateName, NextStateData, Timeout} | --%% {stop, Reason, NewStateData} --%%---------------------------------------------------------------------- -- --%% --%% Packets arriving in various states --%% --handle_info({tcp, Socket, Data}, connecting, S) -> -- log1("eldap. 
tcp packet received when disconnected!~n~p~n", [Data], S), -- {next_state, connecting, S}; -- --handle_info({tcp, Socket, Data}, wait_bind_response, S) -> -- cancel_timer(S#eldap.bind_timer), -- case catch recvd_wait_bind_response(Data, S) of -- bound -> {next_state, active, S}; -- {fail_bind, Reason} -> close_and_retry(S), -- {next_state, connecting, S#eldap{fd = null}}; -- {'EXIT', Reason} -> close_and_retry(S), -- {next_state, connecting, S#eldap{fd = null}}; -- {error, Reason} -> close_and_retry(S), -- {next_state, connecting, S#eldap{fd = null}} -- end; -- --handle_info({tcp, Socket, Data}, active, S) -> -- case catch recvd_packet(Data, S) of -- {reply, Reply, To, NewS} -> gen_fsm:reply(To, Reply), -- {next_state, active, NewS}; -- {ok, NewS} -> {next_state, active, NewS}; -- {'EXIT', Reason} -> {next_state, active, S}; -- {error, Reason} -> {next_state, active, S} -- end; -- --handle_info({tcp_closed, Socket}, All_fsm_states, S) -> -- F = fun(Id, [{Timer, From, Name}|Res]) -> -- gen_fsm:reply(From, {error, tcp_closed}), -- cancel_timer(Timer) -- end, -- dict:map(F, S#eldap.dict), -- retry_connect(), -- {next_state, connecting, S#eldap{fd = null, -- dict = dict:new()}}; -- --handle_info({tcp_error, Socket, Reason}, Fsm_state, S) -> -- log1("eldap received tcp_error: ~p~nIn State: ~p~n", [Reason, Fsm_state], S), -- {next_state, Fsm_state, S}; --%% --%% Timers --%% --handle_info({timeout, Timer, {cmd_timeout, Id}}, active, S) -> -- case cmd_timeout(Timer, Id, S) of -- {reply, To, Reason, NewS} -> gen_fsm:reply(To, Reason), -- {next_state, active, NewS}; -- {error, Reason} -> {next_state, active, S} -- end; -- --handle_info({timeout, retry_connect}, connecting, S) -> -- {ok, NextState, NewS} = connect_bind(S), -- {next_state, NextState, NewS}; -- --handle_info({timeout, Timer, bind_timeout}, wait_bind_response, S) -> -- close_and_retry(S), -- {next_state, connecting, S#eldap{fd = null}}; -- --%% --%% Make sure we don't fill the message queue with rubbish --%% 
--handle_info(Info, StateName, S) -> -- log1("eldap. Unexpected Info: ~p~nIn state: ~p~n when StateData is: ~p~n", -- [Info, StateName, S], S), -- {next_state, StateName, S}. -- --%%---------------------------------------------------------------------- --%% Func: terminate/3 --%% Purpose: Shutdown the fsm --%% Returns: any --%%---------------------------------------------------------------------- --terminate(Reason, StateName, StatData) -> -- ok. -- --%%---------------------------------------------------------------------- --%% Func: code_change/4 --%% Purpose: Convert process state when code is changed --%% Returns: {ok, NewState, NewStateData} --%%---------------------------------------------------------------------- --code_change(OldVsn, StateName, S, Extra) -> -- {ok, StateName, S}. -- --%%%---------------------------------------------------------------------- --%%% Internal functions --%%%---------------------------------------------------------------------- --send_command(Command, From, S) -> -- Id = bump_id(S), -- {Name, Request} = gen_req(Command), -- Message = #'LDAPMessage'{messageID = Id, -- protocolOp = {Name, Request}}, -- log2("~p~n",[{Name, Request}], S), -- {ok, Bytes} = asn1rt:encode('ELDAPv3', 'LDAPMessage', Message), -- ok = gen_tcp:send(S#eldap.fd, Bytes), -- Timer = erlang:start_timer(?CMD_TIMEOUT, self(), {cmd_timeout, Id}), -- New_dict = dict:store(Id, [{Timer, From, Name}], S#eldap.dict), -- {ok, S#eldap{id = Id, -- dict = New_dict}}. 
-- --gen_req({search, A}) -> -- {searchRequest, -- #'SearchRequest'{baseObject = A#eldap_search.base, -- scope = v_scope(A#eldap_search.scope), -- derefAliases = neverDerefAliases, -- sizeLimit = 0, % no size limit -- timeLimit = v_timeout(A#eldap_search.timeout), -- typesOnly = v_bool(A#eldap_search.types_only), -- filter = v_filter(A#eldap_search.filter), -- attributes = v_attributes(A#eldap_search.attributes) -- }}; --gen_req({add, Entry, Attrs}) -> -- {addRequest, -- #'AddRequest'{entry = Entry, -- attributes = Attrs}}; --gen_req({delete, Entry}) -> -- {delRequest, Entry}; --gen_req({modify, Obj, Mod}) -> -- v_modifications(Mod), -- {modifyRequest, -- #'ModifyRequest'{object = Obj, -- modification = Mod}}; --gen_req({modify_dn, Entry, NewRDN, DelOldRDN, NewSup}) -> -- {modDNRequest, -- #'ModifyDNRequest'{entry = Entry, -- newrdn = NewRDN, -- deleteoldrdn = DelOldRDN, -- newSuperior = NewSup}}. -- --%%----------------------------------------------------------------------- --%% recvd_packet --%% Deals with incoming packets in the active state --%% Will return one of: --%% {ok, NewS} - Don't reply to client yet as this is part of a search --%% result and we haven't got all the answers yet. 
--%% {reply, Result, From, NewS} - Reply with result to client From --%% {error, Reason} --%% {'EXIT', Reason} - Broke --%%----------------------------------------------------------------------- --recvd_packet(Pkt, S) -> -- check_tag(Pkt), -- case asn1rt:decode('ELDAPv3', 'LDAPMessage', Pkt) of -- {ok,Msg} -> -- Op = Msg#'LDAPMessage'.protocolOp, -- log2("~p~n",[Op], S), -- Dict = S#eldap.dict, -- Id = Msg#'LDAPMessage'.messageID, -- {Timer, From, Name, Result_so_far} = get_op_rec(Id, Dict), -- case {Name, Op} of -- {searchRequest, {searchResEntry, R}} when -- record(R,'SearchResultEntry') -> -- New_dict = dict:append(Id, R, Dict), -- {ok, S#eldap{dict = New_dict}}; -- {searchRequest, {searchResDone, Result}} -> -- case Result#'LDAPResult'.resultCode of -- success -> -- {Res, Ref} = polish(Result_so_far), -- New_dict = dict:erase(Id, Dict), -- cancel_timer(Timer), -- {reply, #eldap_search_result{entries = Res, -- referrals = Ref}, From, -- S#eldap{dict = New_dict}}; -- Reason -> -- New_dict = dict:erase(Id, Dict), -- cancel_timer(Timer), -- {reply, {error, Reason}, From, S#eldap{dict = New_dict}} -- end; -- {searchRequest, {searchResRef, R}} -> -- New_dict = dict:append(Id, R, Dict), -- {ok, S#eldap{dict = New_dict}}; -- {addRequest, {addResponse, Result}} -> -- New_dict = dict:erase(Id, Dict), -- cancel_timer(Timer), -- Reply = check_reply(Result, From), -- {reply, Reply, From, S#eldap{dict = New_dict}}; -- {delRequest, {delResponse, Result}} -> -- New_dict = dict:erase(Id, Dict), -- cancel_timer(Timer), -- Reply = check_reply(Result, From), -- {reply, Reply, From, S#eldap{dict = New_dict}}; -- {modifyRequest, {modifyResponse, Result}} -> -- New_dict = dict:erase(Id, Dict), -- cancel_timer(Timer), -- Reply = check_reply(Result, From), -- {reply, Reply, From, S#eldap{dict = New_dict}}; -- {modDNRequest, {modDNResponse, Result}} -> -- New_dict = dict:erase(Id, Dict), -- cancel_timer(Timer), -- Reply = check_reply(Result, From), -- {reply, Reply, From, S#eldap{dict = 
New_dict}}; -- {OtherName, OtherResult} -> -- New_dict = dict:erase(Id, Dict), -- cancel_timer(Timer), -- {reply, {error, {invalid_result, OtherName, OtherResult}}, -- From, S#eldap{dict = New_dict}} -- end; -- Error -> Error -- end. -- --check_reply(#'LDAPResult'{resultCode = success}, From) -> -- ok; --check_reply(#'LDAPResult'{resultCode = Reason}, From) -> -- {error, Reason}; --check_reply(Other, From) -> -- {error, Other}. -- --get_op_rec(Id, Dict) -> -- case dict:find(Id, Dict) of -- {ok, [{Timer, From, Name}|Res]} -> -- {Timer, From, Name, Res}; -- error -> -- throw({error, unkown_id}) -- end. -- --%%----------------------------------------------------------------------- --%% recvd_wait_bind_response packet --%% Deals with incoming packets in the wait_bind_response state --%% Will return one of: --%% bound - Success - move to active state --%% {fail_bind, Reason} - Failed --%% {error, Reason} --%% {'EXIT', Reason} - Broken packet --%%----------------------------------------------------------------------- --recvd_wait_bind_response(Pkt, S) -> -- check_tag(Pkt), -- case asn1rt:decode('ELDAPv3', 'LDAPMessage', Pkt) of -- {ok,Msg} -> -- log2("~p", [Msg], S), -- check_id(S#eldap.id, Msg#'LDAPMessage'.messageID), -- case Msg#'LDAPMessage'.protocolOp of -- {bindResponse, Result} -> -- case Result#'LDAPResult'.resultCode of -- success -> bound; -- Error -> {fail_bind, Error} -- end -- end; -- Else -> -- {fail_bind, Else} -- end. -- --check_id(Id, Id) -> ok; --check_id(_, _) -> throw({error, wrong_bind_id}). -- --%%----------------------------------------------------------------------- --%% General Helpers --%%----------------------------------------------------------------------- -- --cancel_timer(Timer) -> -- erlang:cancel_timer(Timer), -- receive -- {timeout, Timer, _} -> -- ok -- after 0 -> -- ok -- end. 
-- -- --%%% Sanity check of received packet --check_tag(Data) -> -- case asn1rt_ber:decode_tag(Data) of -- {Tag, Data1, Rb} -> -- case asn1rt_ber:decode_length(Data1) of -- {{Len,Data2}, Rb2} -> ok; -- _ -> throw({error,decoded_tag_length}) -- end; -- _ -> throw({error,decoded_tag}) -- end. -- --close_and_retry(S) -> -- gen_tcp:close(S#eldap.fd), -- retry_connect(). -- --retry_connect() -> -- erlang:send_after(?RETRY_TIMEOUT, self(), -- {timeout, retry_connect}). -- -- --%%----------------------------------------------------------------------- --%% Sort out timed out commands --%%----------------------------------------------------------------------- --cmd_timeout(Timer, Id, S) -> -- Dict = S#eldap.dict, -- case dict:find(Id, Dict) of -- {ok, [{Id, Timer, From, Name}|Res]} -> -- case Name of -- searchRequest -> -- {Res1, Ref1} = polish(Res), -- New_dict = dict:erase(Id, Dict), -- {reply, From, {timeout, -- #eldap_search_result{entries = Res1, -- referrals = Ref1}}, -- S#eldap{dict = New_dict}}; -- Others -> -- New_dict = dict:erase(Id, Dict), -- {reply, From, {error, timeout}, S#eldap{dict = New_dict}} -- end; -- error -> -- {error, timed_out_cmd_not_in_dict} -- end. -- --%%----------------------------------------------------------------------- --%% Common stuff for results --%%----------------------------------------------------------------------- --%%% --%%% Polish the returned search result --%%% -- --polish(Entries) -> -- polish(Entries, [], []). -- --polish([H|T], Res, Ref) when record(H, 'SearchResultEntry') -> -- ObjectName = H#'SearchResultEntry'.objectName, -- F = fun({_,A,V}) -> {A,V} end, -- Attrs = lists:map(F, H#'SearchResultEntry'.attributes), -- polish(T, [#eldap_entry{object_name = ObjectName, -- attributes = Attrs}|Res], Ref); --polish([H|T], Res, Ref) -> % No special treatment of referrals at the moment. -- polish(T, Res, [H|Ref]); --polish([], Res, Ref) -> -- {Res, Ref}. 
-- --%%----------------------------------------------------------------------- --%% Connect to next server in list and attempt to bind to it. --%%----------------------------------------------------------------------- --connect_bind(S) -> -- Host = next_host(S#eldap.host, S#eldap.hosts), -- TcpOpts = [{packet, asn1}, {active, true}], -- case gen_tcp:connect(Host, S#eldap.port, TcpOpts) of -- {ok, Socket} -> -- case bind_request(Socket, S) of -- {ok, NewS} -> -- Timer = erlang:start_timer(?BIND_TIMEOUT, self(), -- {timeout, bind_timeout}), -- {ok, wait_bind_response, NewS#eldap{fd = Socket, -- host = Host, -- bind_timer = Timer}}; -- {error, Reason} -> -- gen_tcp:close(Socket), -- erlang:send_after(?RETRY_TIMEOUT, self(), -- {timeout, retry_connect}), -- {ok, connecting, S#eldap{host = Host}} -- end; -- {error, Reason} -> -- erlang:send_after(?RETRY_TIMEOUT, self(), -- {timeout, retry_connect}), -- {ok, connecting, S#eldap{host = Host}} -- end. -- --bind_request(Socket, S) -> -- Id = bump_id(S), -- Req = #'BindRequest'{version = S#eldap.version, -- name = S#eldap.rootdn, -- authentication = {simple, S#eldap.passwd}}, -- Message = #'LDAPMessage'{messageID = Id, -- protocolOp = {bindRequest, Req}}, -- log2("Message:~p~n",[Message], S), -- {ok, Bytes} = asn1rt:encode('ELDAPv3', 'LDAPMessage', Message), -- ok = gen_tcp:send(Socket, Bytes), -- {ok, S#eldap{id = Id}}. -- --%% Given last tried Server, find next one to try --next_host(null, [H|_]) -> H; % First time, take first --next_host(Host, Hosts) -> % Find next in turn -- next_host(Host, Hosts, Hosts). -- --next_host(Host, [Host], Hosts) -> hd(Hosts); % Wrap back to first --next_host(Host, [Host|Tail], Hosts) -> hd(Tail); % Take next --next_host(Host, [], Hosts) -> hd(Hosts); % Never connected before? (shouldn't happen) --next_host(Host, [H|T], Hosts) -> next_host(Host, T, Hosts). 
-- -- --%%% -------------------------------------------------------------------- --%%% Verify the input data --%%% -------------------------------------------------------------------- -- --v_filter({'and',L}) -> {'and',L}; --v_filter({'or', L}) -> {'or',L}; --v_filter({'not',L}) -> {'not',L}; --v_filter({equalityMatch,AV}) -> {equalityMatch,AV}; --v_filter({greaterOrEqual,AV}) -> {greaterOrEqual,AV}; --v_filter({lessOrEqual,AV}) -> {lessOrEqual,AV}; --v_filter({approxMatch,AV}) -> {approxMatch,AV}; --v_filter({present,A}) -> {present,A}; --v_filter({substrings,S}) when record(S,'SubstringFilter') -> {substrings,S}; --v_filter(_Filter) -> throw({error,concat(["unknown filter: ",_Filter])}). -- --v_modifications(Mods) -> -- F = fun({_,Op,_}) -> -- case lists:member(Op,[add,delete,replace]) of -- true -> true; -- _ -> throw({error,{mod_operation,Op}}) -- end -- end, -- lists:foreach(F, Mods). -- --v_substr([{Key,Str}|T]) when list(Str),Key==initial;Key==any;Key==final -> -- [{Key,Str}|v_substr(T)]; --v_substr([H|T]) -> -- throw({error,{substring_arg,H}}); --v_substr([]) -> -- []. --v_scope(baseObject) -> baseObject; --v_scope(singleLevel) -> singleLevel; --v_scope(wholeSubtree) -> wholeSubtree; --v_scope(_Scope) -> throw({error,concat(["unknown scope: ",_Scope])}). -- --v_bool(true) -> true; --v_bool(false) -> false; --v_bool(_Bool) -> throw({error,concat(["not Boolean: ",_Bool])}). -- --v_timeout(I) when integer(I), I>=0 -> I; --v_timeout(_I) -> throw({error,concat(["timeout not positive integer: ",_I])}). -- --v_attributes(Attrs) -> -- F = fun(A) when list(A) -> A; -- (A) -> throw({error,concat(["attribute not String: ",A])}) -- end, -- lists:map(F,Attrs). 
-- -- --%%% -------------------------------------------------------------------- --%%% Get and Validate the initial configuration --%%% -------------------------------------------------------------------- --get_config() -> -- Priv_dir = code:priv_dir(eldap), -- File = filename:join(Priv_dir, "eldap.conf"), -- case file:consult(File) of -- {ok, Entries} -> -- case catch parse(Entries) of -- {ok, Hosts, Port, Rootdn, Passwd, Log} -> -- {ok, Hosts, Port, Rootdn, Passwd, Log}; -- {error, Reason} -> -- {error, Reason}; -- {'EXIT', Reason} -> -- {error, Reason} -- end; -- {error, Reason} -> -- {error, Reason} -- end. -- --parse(Entries) -> -- {ok, -- get_hosts(host, Entries), -- get_integer(port, Entries), -- get_list(rootdn, Entries), -- get_list(passwd, Entries), -- get_log(log, Entries)}. -- --get_integer(Key, List) -> -- case lists:keysearch(Key, 1, List) of -- {value, {Key, Value}} when integer(Value) -> -- Value; -- {value, {Key, Value}} -> -- throw({error, "Bad Value in Config for " ++ atom_to_list(Key)}); -- false -> -- throw({error, "No Entry in Config for " ++ atom_to_list(Key)}) -- end. -- --get_list(Key, List) -> -- case lists:keysearch(Key, 1, List) of -- {value, {Key, Value}} when list(Value) -> -- Value; -- {value, {Key, Value}} -> -- throw({error, "Bad Value in Config for " ++ atom_to_list(Key)}); -- false -> -- throw({error, "No Entry in Config for " ++ atom_to_list(Key)}) -- end. -- --get_log(Key, List) -> -- case lists:keysearch(Key, 1, List) of -- {value, {Key, Value}} when function(Value) -> -- Value; -- {value, {Key, Else}} -> -- false; -- false -> -- fun(Level, Format, Args) -> io:format("--- " ++ Format, Args) end -- end. -- --get_hosts(Key, List) -> -- lists:map(fun({Key1, {A,B,C,D}}) when integer(A), -- integer(B), -- integer(C), -- integer(D), -- Key == Key1-> -- {A,B,C,D}; -- ({Key1, Value}) when list(Value), -- Key == Key1-> -- Value; -- ({Else, Value}) -> -- throw({error, "Bad Hostname in config"}) -- end, List). 
-- --%%% -------------------------------------------------------------------- --%%% Other Stuff --%%% -------------------------------------------------------------------- --bump_id(#eldap{id = Id}) when Id > ?MAX_TRANSACTION_ID -> -- ?MIN_TRANSACTION_ID; --bump_id(#eldap{id = Id}) -> -- Id + 1. -- --%%% -------------------------------------------------------------------- --%%% Log routines. Call a user provided log routine Fun. --%%% -------------------------------------------------------------------- -- --log1(Str, Args, #eldap{log = Fun, debug_level = N}) -> log(Fun, Str, Args, 1, N). --log2(Str, Args, #eldap{log = Fun, debug_level = N}) -> log(Fun, Str, Args, 2, N). -- --log(Fun, Str, Args, This_level, Status) when function(Fun), This_level =< Status -> -- catch Fun(This_level, Str, Args); --log(_, _, _, _, _) -> -- ok. diff --git a/rabbitmq-server/plugins-src/eldap-wrapper/remove-ietf-doc.patch b/rabbitmq-server/plugins-src/eldap-wrapper/remove-ietf-doc.patch deleted file mode 100644 index e1f55d9..0000000 --- a/rabbitmq-server/plugins-src/eldap-wrapper/remove-ietf-doc.patch +++ /dev/null @@ -1,3036 +0,0 @@ -diff --git a/doc/draft-ietf-asid-ldap-c-api-00.txt b/doc/draft-ietf-asid-ldap-c-api-00.txt -deleted file mode 100755 -index 5f2e856..0000000 ---- a/doc/draft-ietf-asid-ldap-c-api-00.txt -+++ /dev/null -@@ -1,3030 +0,0 @@ -- -- -- -- -- -- --Network Working Group T. Howes --INTERNET-DRAFT Netscape Communications Corp. --Intended Category: Standards Track M. Smith --Obsoletes: RFC 1823 Netscape Communications Corp. --Expires: January 1998 A. Herron -- Microsoft Corp. -- C. Weider -- Microsoft Corp. -- M. Wahl -- Critical Angle, Inc. -- -- 29 July 1997 -- -- -- The C LDAP Application Program Interface -- -- -- -- --1. Status of this Memo -- --This draft document will be submitted to the RFC Editor as a Standards --Track document. Distribution of this memo is unlimited. Please send com- --ments to the authors. -- --This document is an Internet-Draft. 
Internet-Drafts are working docu- --ments of the Internet Engineering Task Force (IETF), its areas, and its --working groups. Note that other groups may also distribute working --documents as Internet-Drafts. -- --Internet-Drafts are draft documents valid for a maximum of six months --and may be updated, replaced, or obsoleted by other documents at any --time. It is inappropriate to use Internet-Drafts as reference material --or to cite them other than as ``work in progress.'' -- --To learn the current status of any Internet-Draft, please check the --``1id-abstracts.txt'' listing contained in the Internet-Drafts Shadow --Directories on ds.internic.net (US East Coast), nic.nordu.net (Europe), --ftp.isi.edu (US West Coast), or munnari.oz.au (Pacific Rim). -- --2. Introduction -- --This document defines a C language application program interface to the --lightweight directory access protocol (LDAP). This document replaces the --previous definition of this API, defined in RFC 1823, updating it to --include support for features found in version 3 of the LDAP protocol. --New extended operation functions were added to support LDAPv3 features --such as controls. In addition, other LDAP API changes were made to -- -- -- --Expires: January 1998 [Page 1] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --support information hiding and thread safety. -- --The C LDAP API is designed to be powerful, yet simple to use. It defines --compatible synchronous and asynchronous interfaces to LDAP to suit a --wide variety of applications. This document gives a brief overview of --the LDAP model, then an overview of how the API is used by an applica- --tion program to obtain LDAP information. The API calls are described in --detail, followed by an appendix that provides some example code demon- --strating the use of the API. This document provides information to the --Internet community. It does not specify any standard. -- --3. 
Overview of the LDAP Model -- --LDAP is the lightweight directory access protocol, described in [2] and --[6]. It can provide a lightweight frontend to the X.500 directory [1], --or a stand-alone service. In either mode, LDAP is based on a client- --server model in which a client makes a TCP connection to an LDAP server, --over which it sends requests and receives responses. -- --The LDAP information model is based on the entry, which contains infor- --mation about some object (e.g., a person). Entries are composed of --attributes, which have a type and one or more values. Each attribute has --a syntax that determines what kinds of values are allowed in the attri- --bute (e.g., ASCII characters, a jpeg photograph, etc.) and how those --values behave during directory operations (e.g., is case significant --during comparisons). -- --Entries may be organized in a tree structure, usually based on politi- --cal, geographical, and organizational boundaries. Each entry is uniquely --named relative to its sibling entries by its relative distinguished name --(RDN) consisting of one or more distinguished attribute values from the --entry. At most one value from each attribute may be used in the RDN. --For example, the entry for the person Babs Jensen might be named with --the "Barbara Jensen" value from the commonName attribute. -- --A globally unique name for an entry, called a distinguished name or DN, --is constructed by concatenating the sequence of RDNs from the entry up --to the root of the tree. For example, if Babs worked for the University --of Michigan, the DN of her U-M entry might be "cn=Barbara Jensen, --o=University of Michigan, c=US". The DN format used by LDAP is defined --in [4]. -- --Operations are provided to authenticate, search for and retrieve infor- --mation, modify information, and add and delete entries from the tree. 
--The next sections give an overview of how the API is used and detailed --descriptions of the LDAP API calls that implement all of these func- --tions. -- -- -- -- --Expires: January 1998 [Page 2] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --4. Overview of LDAP API Use -- --An application generally uses the C LDAP API in four simple steps. -- --- Initialize an LDAP session with a default LDAP server. The -- ldap_init() function returns a handle to the session, allowing mul- -- tiple connections to be open at once. -- --- Authenticate to the LDAP server. The ldap_bind() function and -- friends support a variety of authentication methods. -- --- Perform some LDAP operations and obtain some results. ldap_search() -- and friends return results which can be parsed by -- ldap_result2error(), ldap_first_entry(), ldap_next_entry(), etc. -- --- Close the session. The ldap_unbind() function closes the connec- -- tion. -- --Operations can be performed either synchronously or asynchronously. The --names of the synchronous functions end in _s. For example, a synchronous --search can be completed by calling ldap_search_s(). An asynchronous --search can be initiated by calling ldap_search(). All synchronous rou- --tines return an indication of the outcome of the operation (e.g, the --constant LDAP_SUCCESS or some other error code). The asynchronous rou- --tines return the message id of the operation initiated. This id can be --used in subsequent calls to ldap_result() to obtain the result(s) of the --operation. An asynchronous operation can be abandoned by calling --ldap_abandon(). -- --Results and errors are returned in an opaque structure called LDAPMes- --sage. Routines are provided to parse this structure, step through --entries and attributes returned, etc. Routines are also provided to --interpret errors. Later sections of this document describe these rou- --tines in more detail. 
-- --LDAP version 3 servers may return referrals to other servers. By --default, implementations of this API will attempt to follow referrals --automatically for the application. This behavior can be disabled glo- --bally (using the ldap_set_option() call) or on a per-request basis --through the use of a client control. -- --As in the LDAPv3 protocol itself, all DNs and string values that are --passed into or produced by the C LDAP API are represented as UTF-8[10] --characters. -- --For compatibility with existing applications, implementations of this --API will by default use version 2 of the LDAP protocol. Applications --that intend to take advantage of LDAP version 3 features will need to -- -- -- --Expires: January 1998 [Page 3] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --use the ldap_set_option() call with a LDAP_OPT_PROTOCOL_VERSION to --switch to version 3. -- -- --5. Common Data Structures -- --Some data structures that are common to several LDAP API functions are --defined here: -- -- typedef struct ldap LDAP; -- -- typedef struct ldapmsg LDAPMessage; -- -- struct berval { -- unsigned long bv_len; -- char *bv_val; -- }; -- -- struct timeval { -- long tv_sec; -- long tv_usec; -- }; -- --The LDAP structure is an opaque data type that represents an LDAP ses- --sion Typically this corresponds to a connection to a single server, but --it may encompass several server connections in the face of LDAPv3 refer- --rals. -- --The LDAPMessage structure is an opaque data type that is used to return --results and error information. -- --The berval structure is used to represent arbitrary binary data and its --fields have the following meanings: -- --bv_len Length of data in bytes. -- --bv_val A pointer to the data itself. -- -- --The timeval structure is used to represent an interval of time and its --fields have the following meanings: -- --tv_sec Seconds component of time interval. -- --tv_usec Microseconds component of time interval. 
-- -- -- -- -- -- --Expires: January 1998 [Page 4] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --6. LDAP Error Codes -- --Many of the LDAP API routines return LDAP error codes, some of which --indicate local errors and some of which may be returned by servers. --Supported error codes are (hexadecimal values are given in parentheses --after the constant): -- -- LDAP_SUCCESS (0x00) -- LDAP_OPERATIONS_ERROR( 0x01) -- LDAP_PROTOCOL_ERROR (0x02) -- LDAP_TIMELIMIT_EXCEEDED (0x03) -- LDAP_SIZELIMIT_EXCEEDED (0x04) -- LDAP_COMPARE_FALSE (0x05) -- LDAP_COMPARE_TRUE (0x06) -- LDAP_STRONG_AUTH_NOT_SUPPORTED (0x07) -- LDAP_STRONG_AUTH_REQUIRED (0x08) -- LDAP_REFERRAL (0x0a) -- new in LDAPv3 -- LDAP_ADMINLIMIT_EXCEEDED (0x0b) -- new in LDAPv3 -- LDAP_UNAVAILABLE_CRITICAL_EXTENSION (0x0c) -- new in LDAPv3 -- LDAP_CONFIDENTIALITY_REQUIRED (0x0d) -- new in LDAPv3 -- LDAP_NO_SUCH_ATTRIBUTE (0x10) -- LDAP_UNDEFINED_TYPE (0x11) -- LDAP_INAPPROPRIATE_MATCHING (0x12) -- LDAP_CONSTRAINT_VIOLATION (0x13) -- LDAP_TYPE_OR_VALUE_EXISTS (0x14) -- LDAP_INVALID_SYNTAX (0x15) -- LDAP_NO_SUCH_OBJECT (0x20) -- LDAP_ALIAS_PROBLEM (0x21) -- LDAP_INVALID_DN_SYNTAX (0x22) -- LDAP_IS_LEAF (0x23) -- not used in LDAPv3 -- LDAP_ALIAS_DEREF_PROBLEM (0x24) -- LDAP_INAPPROPRIATE_AUTH (0x30) -- LDAP_INVALID_CREDENTIALS (0x31) -- LDAP_INSUFFICIENT_ACCESS (0x32) -- LDAP_BUSY (0x33) -- LDAP_UNAVAILABLE (0x34) -- LDAP_UNWILLING_TO_PERFORM (0x35) -- LDAP_LOOP_DETECT (0x36) -- LDAP_NAMING_VIOLATION (0x40) -- LDAP_OBJECT_CLASS_VIOLATION (0x41) -- LDAP_NOT_ALLOWED_ON_NONLEAF (0x42) -- LDAP_NOT_ALLOWED_ON_RDN (0x43) -- LDAP_ALREADY_EXISTS (0x44) -- LDAP_NO_OBJECT_CLASS_MODS (0x45) -- LDAP_RESULTS_TOO_LARGE (0x46) -- LDAP_AFFECTS_MULTIPLE_DSAS (0x47) -- new in LDAPv3 -- LDAP_OTHER (0x50) -- LDAP_SERVER_DOWN (0x51) -- -- -- --Expires: January 1998 [Page 5] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- LDAP_LOCAL_ERROR (0x52) -- LDAP_ENCODING_ERROR (0x53) 
-- LDAP_DECODING_ERROR (0x54) -- LDAP_TIMEOUT (0x55) -- LDAP_AUTH_UNKNOWN (0x56) -- LDAP_FILTER_ERROR (0x57) -- LDAP_USER_CANCELLED (0x58) -- LDAP_PARAM_ERROR (0x59) -- LDAP_NO_MEMORY (0x5a) -- LDAP_CONNECT_ERROR (0x5b) -- LDAP_NOT_SUPPORTED (0x5c) -- LDAP_CONTROL_NOT_FOUND (0x5d) -- LDAP_NO_RESULTS_RETURNED (0x5e) -- LDAP_MORE_RESULTS_TO_RETURN (0x5f) -- LDAP_CLIENT_LOOP (0x60) -- LDAP_REFERRAL_LIMIT_EXCEEDED (0x61) -- -- --7. Performing LDAP Operations -- --This section describes each LDAP operation API call in detail. All func- --tions take a "session handle," a pointer to an LDAP structure containing --per-connection information. Many routines return results in an LDAPMes- --sage structure. These structures and others are described as needed --below. -- -- --7.1. Initializing an LDAP Session -- --ldap_init() initializes a session with an LDAP server. The server is not --actually contacted until an operation is performed that requires it, --allowing various options to be set after initialization. -- -- LDAP *ldap_init( -- char *hostname, -- int portno -- ); -- --Use of the following routine is deprecated. -- -- LDAP *ldap_open( -- char *hostname, -- int portno -- ); -- --Parameters are: -- --hostname Contains a space-separated list of hostnames or dotted strings -- -- -- --Expires: January 1998 [Page 6] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- representing the IP address of hosts running an LDAP server to -- connect to. Each hostname in the list can include an optional -- port number which is separated from the host itself with a -- colon (:) character. The hosts are tried in the order listed, -- stopping with the first one to which a successful connection is -- made. Note that only ldap_open() attempts to make the connec- -- tion before returning to the caller. ldap_init() does not con- -- nect to the LDAP server. -- --portno Contains the TCP port number to connect to. 
The default LDAP -- port of 389 can be obtained by supplying the constant -- LDAP_PORT. If a host includes a port number then this parame- -- ter is ignored. -- --ldap_init() and ldap_open() both return a "session handle," a pointer to --an opaque structure that should be passed to subsequent calls pertaining --to the session. These routines return NULL if the session cannot be ini- --tialized in which case the operating system error reporting mechanism --can be checked to see why the call failed. -- --Note that if you connect to an LDAPv2 server, one of the ldap_bind() --calls described below must be completed before other operations can be --performed on the session. LDAPv3 does not require that a bind operation --be completed before other operations can be performed. -- --The calling program can set various attributes of the session by calling --the routines described in the next section. -- -- --7.2. LDAP Session Handle Options -- --The LDAP session handle returned by ldap_init() is a pointer to an --opaque data type representing an LDAP session. Formerly, this data type --was a structure exposed to the caller, and various fields in the struc- --ture could be set to control aspects of the session, such as size and --time limits on searches. -- --In the interest of insulating callers from inevitable changes to this --structure, these aspects of the session are now accessed through a pair --of accessor functions, described below. -- --ldap_get_option() is used to access the current value of various --session-wide parameters. ldap_set_option() is used to set the value of --these parameters. -- -- int ldap_get_option( -- LDAP *ld, -- int option, -- -- -- --Expires: January 1998 [Page 7] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- void *outvalue -- ); -- -- int ldap_set_option( -- LDAP *ld, -- int option, -- void *invalue -- ); -- --Parameters are: -- --ld The session handle. 
-- --option The name of the option being accessed or set. This parameter -- should be one of the following constants, which have the indi- -- cated meanings. After the constant the actual value of the con- -- stant is listed in hexadecimal in parentheses followed by the -- type of the corresponding outvalue or invalue parameter. -- -- LDAP_OPT_DESC (0x01) int * -- The underlying socket descriptor corresponding to the default -- LDAP connection. -- -- LDAP_OPT_DEREF (0x02) int * -- Controls how aliases are handled during search. It can have -- one of the following values: LDAP_DEREF_NEVER (0x00), -- LDAP_DEREF_SEARCHING (0x01), LDAP_DEREF_FINDING (0x02), or -- LDAP_DEREF_ALWAYS (0x03). The LDAP_DEREF_SEARCHING value -- means aliases should be dereferenced during the search but not -- when locating the base object of the search. The -- LDAP_DEREF_FINDING value means aliases should be dereferenced -- when locating the base object but not during the search. -- -- LDAP_OPT_SIZELIMIT (0x03) int * -- A limit on the number of entries to return from a search. A -- value of zero means no limit. -- -- LDAP_OPT_TIMELIMIT (0x04) int * -- A limit on the number of seconds to spend on a search. A value -- of zero means no limit -- -- LDAP_OPT_REBIND_FN (0x06) function pointer -- See the discussion of ldap_bind() and friends below. -- -- LDAP_OPT_REBIND_ARG (0x07) void * -- See the discussion of ldap_bind() and friends below. -- -- LDAP_OPT_REFERRALS (0x08) void * -- -- -- --Expires: January 1998 [Page 8] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- This option controls whether the LDAP library automatically -- follows referrals returned by LDAP servers or not. It can be -- set to one of the constants LDAP_OPT_ON or LDAP_OPT_OFF. -- -- LDAP_OPT_RESTART (0x09) void * -- This option controls whether LDAP I/O operations should -- automatically be restarted if they abort prematurely. 
It -- should be set to one of the constants LDAP_OPT_ON or -- LDAP_OPT_OFF. This option is useful if an LDAP I/O operation -- may be interrupted prematurely, for example by a timer going -- off, or other interrrupt. -- -- LDAP_OPT_PROTOCOL_VERSION (0x11) int * -- This option indicates the version of the default LDAP server. -- It can be one of the constants LDAP_VERSION2 or LDAP_VERSION3. -- If no version is set the default is LDAP_VERSION2. -- -- LDAP_OPT_SERVER_CONTROLS (0x12) LDAPControl ** -- A default list of LDAP server controls to be sent with each -- request. See the Using Controls section below. -- -- LDAP_OPT_CLIENT_CONTROLS (0x13) LDAPControl ** -- A default list of client controls that affect the LDAP ses- -- sion. See the Using Controls section below. -- -- LDAP_OPT_HOST_NAME (0x30) char ** -- The host name of the default LDAP server. -- -- LDAP_OPT_ERROR_NUMBER (0x31) int * -- The code of the most recent LDAP error that occurred for this -- session. -- -- LDAP_OPT_ERROR_STRING (0x32) char ** -- The message returned with the most recent LDAP error that -- occurred for this session. -- -- --outvalue The address of a place to put the value of the option. The -- actual type of this parameter depends on the setting of the -- option parameter. -- --invalue A pointer to the value the option is to be given. The actual -- type of this parameter depends on the setting of the option -- parameter. The constants LDAP_OPT_ON and LDAP_OPT_OFF can be -- given for options that have on or off settings. -- -- -- -- -- -- --Expires: January 1998 [Page 9] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --7.3. Working with controls -- --LDAPv3 operations can be extended through the use of controls. Controls --may be sent to a server or returned to the client with any LDAP message. --These controls are referred to as server controls. -- --The LDAP API also supports a client-side extension mechanism through the --use of client controls. 
These controls affect the behavior of the LDAP --API only and are never sent to a server. A common data structure is --used to represent both types of controls: -- -- typedef struct ldapcontrol { -- char *ldctl_oid; -- struct berval ldctl_value; -- char ldctl_iscritical; -- } LDAPControl, *PLDAPControl; -- --The fields in the ldapcontrol structure have the following meanings: -- --ldctl_oid The control type, represented as a string. -- --ldctl_value The data associated with the control (if any). -- --ldctl_iscritical Indicates whether the control is critical of not. If -- this field is non-zero, the operation will only be car- -- ried out if the control is recognized by the server -- and/or client. -- --Some LDAP API calls allocate an ldapcontrol structure or a NULL- --terminated array of ldapcontrol structures. The following routines can --be used to dispose of a single control or an array of controls: -- -- void ldap_control_free( LDAPControl *ctrl ); -- void ldap_controls_free( LDAPControl **ctrls ); -- --A set of controls that affect the entire session can be set using the --ldap_set_option() function (see above). A list of controls can also be --passed directly to some LDAP API calls such as ldap_search_ext(), in --which case any controls set for the session through the use of --ldap_set_option() are ignored. Control lists are represented as a NULL- --terminated array of pointers to ldapcontrol structures. -- --Server controls are defined by LDAPv3 protocol extension documents; for --example, a control has been proposed to support server-side sorting of --search results [7]. -- --No client controls are defined by this document but they may be defined --in future revisions or in any document that extends this API. -- -- -- --Expires: January 1998 [Page 10] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --7.4. 
Authenticating to the directory -- --The following functions are used to authenticate an LDAP client to an --LDAP directory server. -- --The ldap_sasl_bind() and ldap_sasl_bind_s() functions can be used to do --general and extensible authentication over LDAP through the use of the --Simple Authentication Security Layer [8]. The routines both take the dn --to bind as, the method to use, as a dotted-string representation of an --OID identifying the method, and a struct berval holding the credentials. --The special constant value LDAP_SASL_SIMPLE ("") can be passed to --request simple authentication, or the simplified routines --ldap_simple_bind() or ldap_simple_bind_s() can be used. -- -- int ldap_sasl_bind( -- LDAP *ld, -- char *dn, -- char *mechanism, -- struct berval *cred, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- int *msgidp -- ); -- -- int ldap_sasl_bind_s( -- LDAP *ld, -- char *dn, -- char *mechanism, -- struct berval *cred, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- struct berval **servercredp -- ); -- -- int ldap_simple_bind( -- LDAP *ld, -- char *dn, -- char *passwd -- ); -- -- int ldap_simple_bind_s( -- LDAP *ld, -- char *dn, -- char *passwd -- ); -- -- The use of the following routines is deprecated: -- -- -- -- --Expires: January 1998 [Page 11] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- int ldap_bind( LDAP *ld, char *dn, char *cred, int method ); -- -- int ldap_bind_s( LDAP *ld, char *dn, char *cred, int method ); -- -- int ldap_kerberos_bind( LDAP *ld, char *dn ); -- -- int ldap_kerberos_bind_s( LDAP *ld, char *dn ); -- --Parameters are: -- --ld The session handle. -- --dn The name of the entry to bind as. -- --mechanism Either LDAP_AUTH_SIMPLE_OID to get simple authentication, -- or a dotted text string representing an OID identifying the -- SASL method. -- --cred The credentials with which to authenticate. Arbitrary -- credentials can be passed using this parameter. 
The format -- and content of the credentials depends on the setting of -- the mechanism parameter. -- --passwd For ldap_simple_bind(), the password to compare to the -- entry's userPassword attribute. -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --msgidp This result parameter will be set to the message id of the -- request if the ldap_sasl_bind() call succeeds. -- --servercredp This result parameter will be set to the credentials -- returned by the server. This should be freed by calling -- ldap_If no credentials are returned it will be set to NULL. -- --Additional parameters for the deprecated routines are not described. --Interested readers are referred to RFC 1823. -- --The ldap_sasl_bind() function initiates an asynchronous bind operation --and returns the constant LDAP_SUCCESS if the request was successfully --sent, or another LDAP error code if not. See the section below on error --handling for more information about possible errors and how to interpret --them. If successful, ldap_sasl_bind() places the message id of the --request in *msgidp. A subsequent call to ldap_result(), described below, --can be used to obtain the result of the bind. -- -- -- -- --Expires: January 1998 [Page 12] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --The ldap_simple_bind() function initiates a simple asynchronous bind --operation and returns the message id of the operation initiated. A sub- --sequent call to ldap_result(), described below, can be used to obtain --the result of the bind. In case of error, ldap_simple_bind() will return ---1, setting the session error parameters in the LDAP structure appropri- --ately. -- --The synchronous ldap_sasl_bind_s() and ldap_simple_bind_s() functions --both return the result of the operation, either the constant --LDAP_SUCCESS if the operation was successful, or another LDAP error code --if it was not. 
See the section below on error handling for more informa- --tion about possible errors and how to interpret them. -- --Note that if an LDAPv2 server is contacted, no other operations over the --connection should be attempted before a bind call has successfully com- --pleted. -- --Subsequent bind calls can be used to re-authenticate over the same con- --nection, and multistep SASL sequences can be accomplished through a --sequence of calls to ldap_sasl_bind() or ldap_sasl_bind_s(). -- -- --7.5. Closing the session -- --The following functions are used to unbind from the directory, close the --connection, and dispose of the session handle. -- -- int ldap_unbind( LDAP *ld ); -- -- int ldap_unbind_s( LDAP *ld ); -- --Parameters are: -- --ld The session handle. -- --ldap_unbind() and ldap_unbind_s() both work synchronously, unbinding --from the directory, closing the connection, and freeing up the ld struc- --ture before returning. There is no server response to an unbind opera- --tion. ldap_unbind() returns LDAP_SUCCESS (or another LDAP error code if --the request cannot be sent to the LDAP server). After a call to --ldap_unbind() or ldap_unbind_s(), the session handle ld is invalid. -- -- --7.6. Searching -- --The following functions are used to search the LDAP directory, returning --a requested set of attributes for each entry matched. There are five --variations. 
-- -- -- --Expires: January 1998 [Page 13] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- int ldap_search_ext( -- LDAP *ld, -- char *base, -- int scope, -- char *filter, -- char **attrs, -- int attrsonly, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- struct timeval *timeoutp, -- int sizelimit, -- int *msgidp -- ); -- -- int ldap_search_ext_s( -- LDAP *ld, -- char *base, -- int scope, -- char *filter, -- char **attrs, -- int attrsonly, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- struct timeval *timeoutp, -- int sizelimit, -- LDAPMessage **res -- ); -- -- int ldap_search( -- LDAP *ld, -- char *base, -- int scope, -- char *filter, -- char **attrs, -- int attrsonly -- ); -- -- int ldap_search_s( -- LDAP *ld, -- char *base, -- int scope, -- char *filter, -- char **attrs, -- int attrsonly, -- LDAPMessage **res -- ); -- -- int ldap_search_st( -- -- -- --Expires: January 1998 [Page 14] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- LDAP *ld, -- char *base, -- int scope, -- char *filter, -- char **attrs, -- int attrsonly, -- struct timeval *timeout, -- LDAPMessage **res -- ); -- --Parameters are: -- --ld The session handle. -- --base The dn of the entry at which to start the search. -- --scope One of LDAP_SCOPE_BASE (0x00), LDAP_SCOPE_ONELEVEL (0x01), -- or LDAP_SCOPE_SUBTREE (0x02), indicating the scope of the -- search. -- --filter A character string as described in [3], representing the -- search filter. -- --attrs A NULL-terminated array of strings indicating which attri- -- butes to return for each matching entry. Passing NULL for -- this parameter causes all available attributes to be -- retrieved. -- --attrsonly A boolean value that should be zero if both attribute types -- and values are to be returned, non-zero if only types are -- wanted. -- --timeout For the ldap_search_st() function, this specifies the local -- search timeout value. 
For the ldap_search_ext() and -- ldap_search_ext_s() functions, this specifies both the -- local search timeout value and the operation time limit -- that is sent to the server within the search request. -- --res For the synchronous calls, this is a result parameter which -- will contain the results of the search upon completion of -- the call. -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --msgidp This result parameter will be set to the message id of the -- request if the ldap_search_ext() call succeeds. -- -- -- --Expires: January 1998 [Page 15] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --There are three options in the session handle ld which potentially --affect how the search is performed. They are: -- --LDAP_OPT_SIZELIMIT -- A limit on the number of entries to return from the search. -- A value of zero means no limit. Note that the value from -- the session handle is ignored when using the -- ldap_search_ext() or ldap_search_ext_s() functions. -- --LDAP_OPT_TIMELIMIT -- A limit on the number of seconds to spend on the search. A -- value of zero means no limit. Note that the value from the -- session handle is ignored when using the ldap_search_ext() -- or ldap_search_ext_s() functions. -- --LDAP_OPT_DEREF -- One of LDAP_DEREF_NEVER (0x00), LDAP_DEREF_SEARCHING -- (0x01), LDAP_DEREF_FINDING (0x02), or LDAP_DEREF_ALWAYS -- (0x03), specifying how aliases should be handled during the -- search. The LDAP_DEREF_SEARCHING value means aliases should -- be dereferenced during the search but not when locating the -- base object of the search. The LDAP_DEREF_FINDING value -- means aliases should be dereferenced when locating the base -- object but not during the search. -- --The ldap_search_ext() function initiates an asynchronous search opera- --tion and returns the constant LDAP_SUCCESS if the request was success- --fully sent, or another LDAP error code if not. 
See the section below on --error handling for more information about possible errors and how to --interpret them. If successful, ldap_search_ext() places the message id --of the request in *msgidp. A subsequent call to ldap_result(), described --below, can be used to obtain the results from the search. These results --can be parsed using the result parsing routines described in detail --later. -- --Similar to ldap_search_ext(), the ldap_search() function initiates an --asynchronous search operation and returns the message id of the opera- --tion initiated. As for ldap_search_ext(), a subsequent call to --ldap_result(), described below, can be used to obtain the result of the --bind. In case of error, ldap_search() will return -1, setting the ses- --sion error parameters in the LDAP structure appropriately. -- --The synchronous ldap_search_ext_s(), ldap_search_s(), and --ldap_search_st() functions all return the result of the operation, --either the constant LDAP_SUCCESS if the operation was successful, or --another LDAP error code if it was not. See the section below on error --handling for more information about possible errors and how to interpret --them. Entries returned from the search (if any) are contained in the -- -- -- --Expires: January 1998 [Page 16] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --res parameter. This parameter is opaque to the caller. Entries, attri- --butes, values, etc., should be extracted by calling the parsing routines --described below. The results contained in res should be freed when no --longer in use by calling ldap_msgfree(), described later. -- --The ldap_search_ext() and ldap_search_ext_s() functions support LDAPv3 --server controls, client controls, and allow varying size and time limits --to be easily specified for each search operation. The ldap_search_st() --function is identical to ldap_search_s() except that it takes an addi- --tional parameter specifying a local timeout for the search. 
-- --7.7. Reading an Entry -- --LDAP does not support a read operation directly. Instead, this operation --is emulated by a search with base set to the DN of the entry to read, --scope set to LDAP_SCOPE_BASE, and filter set to "(objectclass=*)". attrs --contains the list of attributes to return. -- -- --7.8. Listing the Children of an Entry -- --LDAP does not support a list operation directly. Instead, this operation --is emulated by a search with base set to the DN of the entry to list, --scope set to LDAP_SCOPE_ONELEVEL, and filter set to "(objectclass=*)". --attrs contains the list of attributes to return for each child entry. -- --7.9. Comparing a Value Against an Entry -- --The following routines are used to compare a given attribute value --assertion against an LDAP entry. There are four variations: -- -- int ldap_compare_ext( -- LDAP *ld, -- char *dn, -- char *attr, -- struct berval *bvalue -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- int *msgidp -- ); -- -- int ldap_compare_ext_s( -- LDAP *ld, -- char *dn, -- char *attr, -- struct berval *bvalue, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls -- -- -- --Expires: January 1998 [Page 17] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- ); -- -- int ldap_compare( -- LDAP *ld, -- char *dn, -- char *attr, -- char *value -- ); -- -- int ldap_compare_s( -- LDAP *ld, -- char *dn, -- char *attr, -- char *value -- ); -- --Parameters are: -- --ld The session handle. -- --dn The name of the entry to compare against. -- --attr The attribute to compare against. -- --bvalue The attribute value to compare against those found in the -- given entry. This parameter is used in the extended rou- -- tines and is a pointer to a struct berval so it is possible -- to compare binary values. -- --value A string attribute value to compare against, used by the -- ldap_compare() and ldap_compare_s() functions. 
Use -- ldap_compare_ext() or ldap_compare_ext_s() if you need to -- compare binary values. -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --msgidp This result parameter will be set to the message id of the -- request if the ldap_compare_ext() call succeeds. -- --The ldap_compare_ext() function initiates an asynchronous compare opera- --tion and returns the constant LDAP_SUCCESS if the request was success- --fully sent, or another LDAP error code if not. See the section below on --error handling for more information about possible errors and how to --interpret them. If successful, ldap_compare_ext() places the message id --of the request in *msgidp. A subsequent call to ldap_result(), described --below, can be used to obtain the result of the compare. -- -- -- --Expires: January 1998 [Page 18] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --Similar to ldap_compare_ext(), the ldap_compare() function initiates an --asynchronous compare operation and returns the message id of the opera- --tion initiated. As for ldap_compare_ext(), a subsequent call to --ldap_result(), described below, can be used to obtain the result of the --bind. In case of error, ldap_compare() will return -1, setting the ses- --sion error parameters in the LDAP structure appropriately. -- --The synchronous ldap_compare_ext_s() and ldap_compare_s() functions both --return the result of the operation, either the constant LDAP_SUCCESS if --the operation was successful, or another LDAP error code if it was not. --See the section below on error handling for more information about pos- --sible errors and how to interpret them. -- --The ldap_compare_ext() and ldap_compare_ext_s() functions support LDAPv3 --server controls and client controls. -- -- --7.10. Modifying an entry -- --The following routines are used to modify an existing LDAP entry. 
There --are four variations: -- -- typedef struct ldapmod { -- int mod_op; -- char *mod_type; -- union { -- char **modv_strvals; -- struct berval **modv_bvals; -- } mod_vals; -- } LDAPMod; -- #define mod_values mod_vals.modv_strvals -- #define mod_bvalues mod_vals.modv_bvals -- -- int ldap_modify_ext( -- LDAP *ld, -- char *dn, -- LDAPMod **mods, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- int *msgidp -- ); -- -- int ldap_modify_ext_s( -- LDAP *ld, -- char *dn, -- LDAPMod **mods, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls -- -- -- --Expires: January 1998 [Page 19] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- ); -- -- int ldap_modify( -- LDAP *ld, -- char *dn, -- LDAPMod **mods -- ); -- -- int ldap_modify_s( -- LDAP *ld, -- char *dn, -- LDAPMod **mods -- ); -- --Parameters are: -- --ld The session handle. -- --dn The name of the entry to modify. -- --mods A NULL-terminated array of modifications to make to the -- entry. -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --msgidp This result parameter will be set to the message id of the -- request if the ldap_modify_ext() call succeeds. -- --The fields in the LDAPMod structure have the following meanings: -- --mod_op The modification operation to perform. It should be one of -- LDAP_MOD_ADD (0x00), LDAP_MOD_DELETE (0x01), or -- LDAP_MOD_REPLACE (0x02). This field also indicates the -- type of values included in the mod_vals union. It is logi- -- cally ORed with LDAP_MOD_BVALUES (0x80) to select the -- mod_bvalues form. Otherwise, the mod_values form is used. -- --mod_type The type of the attribute to modify. -- --mod_vals The values (if any) to add, delete, or replace. Only one of -- the mod_values or mod_bvalues variants should be used, -- selected by ORing the mod_op field with the constant -- LDAP_MOD_BVALUES. 
mod_values is a NULL-terminated array of -- zero-terminated strings and mod_bvalues is a NULL- -- terminated array of berval structures that can be used to -- pass binary values such as images. -- -- -- --Expires: January 1998 [Page 20] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --For LDAP_MOD_ADD modifications, the given values are added to the --entry, creating the attribute if necessary. -- --For LDAP_MOD_DELETE modifications, the given values are deleted from the --entry, removing the attribute if no values remain. If the entire attri- --bute is to be deleted, the mod_vals field should be set to NULL. -- --For LDAP_MOD_REPLACE modifications, the attribute will have the listed --values after the modification, having been created if necessary, or --removed if the mod_vals field is NULL. All modifications are performed --in the order in which they are listed. -- --The ldap_modify_ext() function initiates an asynchronous modify opera- --tion and returns the constant LDAP_SUCCESS if the request was success- --fully sent, or another LDAP error code if not. See the section below on --error handling for more information about possible errors and how to --interpret them. If successful, ldap_modify_ext() places the message id --of the request in *msgidp. A subsequent call to ldap_result(), described --below, can be used to obtain the result of the modify. -- --Similar to ldap_modify_ext(), the ldap_modify() function initiates an --asynchronous modify operation and returns the message id of the opera- --tion initiated. As for ldap_modify_ext(), a subsequent call to --ldap_result(), described below, can be used to obtain the result of the --modify. In case of error, ldap_modify() will return -1, setting the ses- --sion error parameters in the LDAP structure appropriately. 
-- --The synchronous ldap_modify_ext_s() and ldap_modify_s() functions both --return the result of the operation, either the constant LDAP_SUCCESS if --the operation was successful, or another LDAP error code if it was not. --See the section below on error handling for more information about pos- --sible errors and how to interpret them. -- --The ldap_modify_ext() and ldap_modify_ext_s() functions support LDAPv3 --server controls and client controls. -- -- --7.11. Modifying the Name of an Entry -- --In LDAPv2, the ldap_modrdn() and ldap_modrdn_s() routines were used to --change the name of an LDAP entry. They could only be used to change the --least significant component of a name (the RDN or relative distinguished --name). LDAPv3 provides the Modify DN protocol operation that allows more --general name change access. The ldap_rename() and ldap_rename_s() rou- --tines are used to change the name of an entry, and the use of the --ldap_modrdn() and ldap_modrdn_s() routines is deprecated. -- -- int ldap_rename( -- -- -- --Expires: January 1998 [Page 21] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- LDAP *ld, -- char *dn, -- char *newrdn, -- char *newparent, -- int deleteoldrdn, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- int *msgidp -- -- ); -- int ldap_rename_s( -- LDAP *ld, -- char *dn, -- char *newrdn, -- char *newparent, -- int deleteoldrdn, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls -- ); -- -- Use of the following routines is deprecated. -- -- int ldap_modrdn( -- LDAP *ld, -- char *dn, -- char *newrdn, -- int deleteoldrdn -- ); -- int ldap_modrdn_s( -- LDAP *ld, -- char *dn, -- char *newrdn, -- int deleteoldrdn -- ); -- --Parameters are: -- --ld The session handle. -- --dn The name of the entry whose DN is to be changed. -- --newrdn The new RDN to give the entry. -- --newparent The new parent, or superior entry. If this parameter is -- NULL, only the RDN of the entry is changed. 
The root DN -- may be specified by passing a zero length string, "". The -- newparent parameter should always be NULL when using ver- -- sion 2 of the LDAP protocol; otherwise the server's -- -- -- --Expires: January 1998 [Page 22] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- behavior is undefined. -- --deleteoldrdn This parameter only has meaning on the rename routines if -- newrdn is different than the old RDN. It is a boolean -- value, if non-zero indicating that the old RDN value(s) -- should be removed, if zero indicating that the old RDN -- value(s) should be retained as non-distinguished values of -- the entry. -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --msgidp This result parameter will be set to the message id of the -- request if the ldap_rename() call succeeds. -- --The ldap_rename() function initiates an asynchronous modify DN operation --and returns the constant LDAP_SUCCESS if the request was successfully --sent, or another LDAP error code if not. See the section below on error --handling for more information about possible errors and how to interpret --them. If successful, ldap_rename() places the DN message id of the --request in *msgidp. A subsequent call to ldap_result(), described below, --can be used to obtain the result of the rename. -- --The synchronous ldap_rename_s() returns the result of the operation, --either the constant LDAP_SUCCESS if the operation was successful, or --another LDAP error code if it was not. See the section below on error --handling for more information about possible errors and how to interpret --them. -- --The ldap_rename() and ldap_rename_s() functions both support LDAPv3 --server controls and client controls. -- -- --7.12. Adding an entry -- --The following functions are used to add entries to the LDAP directory. 
--There are four variations: -- -- int ldap_add_ext( -- LDAP *ld, -- char *dn, -- LDAPMod **attrs, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- int *msgidp -- ); -- -- -- -- --Expires: January 1998 [Page 23] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- int ldap_add_ext_s( -- LDAP *ld, -- char *dn, -- LDAPMod **attrs, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls -- ); -- -- int ldap_add( -- LDAP *ld, -- char *dn, -- LDAPMod **attrs -- ); -- -- int ldap_add_s( -- LDAP *ld, -- char *dn, -- LDAPMod **attrs -- ); -- --Parameters are: -- --ld The session handle. -- --dn The name of the entry to add. -- --attrs The entry's attributes, specified using the LDAPMod struc- -- ture defined for ldap_modify(). The mod_type and mod_vals -- fields should be filled in. The mod_op field is ignored -- unless ORed with the constant LDAP_MOD_BVALUES, used to -- select the mod_bvalues case of the mod_vals union. -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --msgidp This result parameter will be set to the message id of the -- request if the ldap_add_ext() call succeeds. -- --Note that the parent of the entry being added must already exist or the --parent must be empty (i.e., equal to the root DN) for an add to succeed. -- --The ldap_add_ext() function initiates an asynchronous add operation and --returns the constant LDAP_SUCCESS if the request was successfully sent, --or another LDAP error code if not. See the section below on error han- --dling for more information about possible errors and how to interpret --them. If successful, ldap_add_ext() places the message id of the --request in *msgidp. A subsequent call to ldap_result(), described below, -- -- -- --Expires: January 1998 [Page 24] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --can be used to obtain the result of the add. 
-- --Similar to ldap_add_ext(), the ldap_add() function initiates an asyn- --chronous add operation and returns the message id of the operation ini- --tiated. As for ldap_add_ext(), a subsequent call to ldap_result(), --described below, can be used to obtain the result of the add. In case of --error, ldap_add() will return -1, setting the session error parameters --in the LDAP structure appropriately. -- --The synchronous ldap_add_ext_s() and ldap_add_s() functions both return --the result of the operation, either the constant LDAP_SUCCESS if the --operation was successful, or another LDAP error code if it was not. See --the section below on error handling for more information about possible --errors and how to interpret them. -- --The ldap_add_ext() and ldap_add_ext_s() functions support LDAPv3 server --controls and client controls. -- -- -- --7.13. Deleting an entry -- --The following functions are used to delete a leaf entry from the LDAP --directory. There are four variations: -- -- int ldap_delete_ext( -- LDAP *ld, -- char *dn, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- int *msgidp -- ); -- -- int ldap_delete_ext_s( -- LDAP *ld, -- char *dn, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls -- ); -- -- int ldap_delete( -- LDAP *ld, -- char *dn -- ); -- -- int ldap_delete_s( -- LDAP *ld, -- char *dn -- -- -- --Expires: January 1998 [Page 25] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- ); -- --Parameters are: -- --ld The session handle. -- --dn The name of the entry to delete. -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --msgidp This result parameter will be set to the message id of the -- request if the ldap_delete_ext() call succeeds. -- --Note that the entry to delete must be a leaf entry (i.e., it must have --no children). Deletion of entire subtrees in a single operation is not --supported by LDAP. 
-- --The ldap_delete_ext() function initiates an asynchronous delete opera- --tion and returns the constant LDAP_SUCCESS if the request was success- --fully sent, or another LDAP error code if not. See the section below on --error handling for more information about possible errors and how to --interpret them. If successful, ldap_delete_ext() places the message id --of the request in *msgidp. A subsequent call to ldap_result(), described --below, can be used to obtain the result of the delete. -- --Similar to ldap_delete_ext(), the ldap_delete() function initiates an --asynchronous delete operation and returns the message id of the opera- --tion initiated. As for ldap_delete_ext(), a subsequent call to --ldap_result(), described below, can be used to obtain the result of the --delete. In case of error, ldap_delete() will return -1, setting the ses- --sion error parameters in the LDAP structure appropriately. -- --The synchronous ldap_delete_ext_s() and ldap_delete_s() functions both --return the result of the operation, either the constant LDAP_SUCCESS if --the operation was successful, or another LDAP error code if it was not. --See the section below on error handling for more information about pos- --sible errors and how to interpret them. -- --The ldap_delete_ext() and ldap_delete_ext_s() functions support LDAPv3 --server controls and client controls. -- -- --7.14. Extended Operations -- --The ldap_extended_operation() and ldap_extended_operation_s() routines --allow extended LDAP operations to be passed to the server, providing a -- -- -- --Expires: January 1998 [Page 26] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --general protocol extensibility mechanism. 
-- -- int ldap_extended_operation( -- LDAP *ld, -- char *exoid, -- struct berval *exdata, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- int *msgidp -- ); -- -- int ldap_extended_operation_s( -- LDAP *ld, -- char *exoid, -- struct berval *exdata, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls, -- char **retoidp, -- struct berval **retdatap -- ); -- --Parameters are: -- --ld The session handle. -- --requestoid The dotted-OID text string naming the request. -- --requestdata The arbitrary data required by the operation (if NULL, no -- data is sent to the server). -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --msgidp This result parameter will be set to the message id of the -- request if the ldap_extended_operation() call succeeds. -- --retoidp Pointer to a character string that will be set to an allo- -- cated, dotted-OID text string returned by the server. This -- string should be disposed of using the ldap_memfree() func- -- tion. If no OID was returned, *retoidp is set to NULL. -- --retdatap Pointer to a berval structure pointer that will be set an -- allocated copy of the data returned by the server. This -- struct berval should be disposed of using ber_bvfree(). If -- no data is returned, *retdatap is set to NULL. -- --The ldap_extended_operation() function initiates an asynchronous -- -- -- --Expires: January 1998 [Page 27] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --extended operation and returns the constant LDAP_SUCCESS if the request --was successfully sent, or another LDAP error code if not. See the sec- --tion below on error handling for more information about possible errors --and how to interpret them. If successful, ldap_extended_operation() --places the message id of the request in *msgidp. 
A subsequent call to --ldap_result(), described below, can be used to obtain the result of the --extended operation which can be passed to ldap_parse_extended_result() --to obtain the OID and data contained in the response. -- --The synchronous ldap_extended_operation_s() function returns the result --of the operation, either the constant LDAP_SUCCESS if the operation was --successful, or another LDAP error code if it was not. See the section --below on error handling for more information about possible errors and --how to interpret them. The retoid and retdata parameters are filled in --with the OID and data from the response. If no OID or data was --returned, these parameters are set to NULL. -- --The ldap_extended_operation() and ldap_extended_operation_s() functions --both support LDAPv3 server controls and client controls. -- -- --8. Abandoning An Operation -- --The following calls are used to abandon an operation in progress: -- -- int ldap_abandon_ext( -- LDAP *ld, -- int msgid, -- LDAPControl **serverctrls, -- LDAPControl **clientctrls -- ); -- -- int ldap_abandon( -- LDAP *ld, -- int msgid -- ); -- -- --ld The session handle. -- --msgid The message id of the request to be abandoned. -- --serverctrls List of LDAP server controls. -- --clientctrls List of client controls. -- --ldap_abandon_ext() abandons the operation with message id msgid and --returns the constant LDAP_SUCCESS if the abandon was successful or -- -- -- --Expires: January 1998 [Page 28] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --another LDAP error code if not. See the section below on error handling --for more information about possible errors and how to interpret them. -- --ldap_abandon() is identical to ldap_abandon_ext() except that it returns --zero if the abandon was successful, -1 otherwise and does not support --LDAPv3 server controls or client controls. 
-- --After a successful call to ldap_abandon() or ldap_abandon_ext(), results --with the given message id are never returned from a subsequent call to --ldap_result(). There is no server response to LDAP abandon operations. -- -- --9. Obtaining Results and Peeking Inside LDAP Messages -- --ldap_result() is used to obtain the result of a previous asynchronously --initiated operation. Note that depending on how it is called, --ldap_result() may actually return a list or "chain" of messages. -- --ldap_msgfree() frees the results obtained from a previous call to --ldap_result(), or a synchronous search routine. -- --ldap_msgtype() returns the type of an LDAP message. ldap_msgid() --returns the message ID of an LDAP message. -- -- int ldap_result( -- LDAP *ld, -- int msgid, -- int all, -- struct timeval *timeout, -- LDAPMessage **res -- ); -- -- int ldap_msgfree( LDAPMessage *res ); -- -- int ldap_msgtype( LDAPMessage *res ); -- -- int ldap_msgid( LDAPMessage *res ); -- --Parameters are: -- --ld The session handle. -- --msgid The message id of the operation whose results are to be -- returned, or the constant LDAP_RES_ANY (-1) if any result is -- desired. -- --all Specifies how many messages will be retrieved in a single call -- to ldap_result(). This parameter only has meaning for search -- -- -- --Expires: January 1998 [Page 29] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- results. Pass the constant LDAP_MSG_ONE (0x00) to retrieve one -- message at a time. Pass LDAP_MSG_ALL (0x01) to request that -- all results of a search be received before returning all -- results in a single chain. Pass LDAP_MSG_RECEIVED (0x02) to -- indicate that all results retrieved so far should be returned -- in the result chain. -- --timeout A timeout specifying how long to wait for results to be -- returned. A NULL value causes ldap_result() to block until -- results are available. A timeout value of zero seconds speci- -- fies a polling behavior. 
-- --res For ldap_result(), a result parameter that will contain the -- result(s) of the operation. For ldap_msgfree(), the result -- chain to be freed, obtained from a previous call to -- ldap_result(), ldap_search_s(), or ldap_search_st(). -- --Upon successful completion, ldap_result() returns the type of the first --result returned in the res parameter. This will be one of the following --constants. -- -- LDAP_RES_BIND (0x61) -- LDAP_RES_SEARCH_ENTRY (0x64) -- LDAP_RES_SEARCH_REFERENCE (0x73) -- new in LDAPv3 -- LDAP_RES_SEARCH_RESULT (0x65) -- LDAP_RES_MODIFY (0x67) -- LDAP_RES_ADD (0x69) -- LDAP_RES_DELETE (0x6B) -- LDAP_RES_MODDN (0x6D) -- LDAP_RES_COMPARE (0x6F) -- LDAP_RES_EXTENDED (0x78) -- new in LDAPv3 -- --ldap_result() returns 0 if the timeout expired and -1 if an error --occurs, in which case the error parameters of the LDAP session handle --will be set accordingly. -- --ldap_msgfree() frees the result structure pointed to by res and returns --the type of the message it freed. -- --ldap_msgtype() returns the type of the LDAP message it is passed as a --parameter. The type will be one of the types listed above, or -1 on --error. -- --ldap_msgid() returns the message ID associated with the LDAP message --passed as a parameter. -- -- -- -- -- -- --Expires: January 1998 [Page 30] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --10. Handling Errors and Parsing Results -- --The following calls are used to extract information from results and --handle errors returned by other LDAP API routines. 
-- -- int ldap_parse_result( -- LDAP *ld, -- LDAPMessage *res, -- int *errcodep, -- char **matcheddnp, -- char **errmsgp, -- char ***referralsp, -- LDAPControl ***serverctrlsp, -- int freeit -- ); -- -- int ldap_parse_sasl_bind_result( -- LDAP *ld, -- LDAPMessage *res, -- struct berval **servercredp, -- int freeit -- ); -- -- int ldap_parse_extended_result( -- LDAP *ld, -- LDAPMessage *res, -- char **resultoidp, -- struct berval **resultdata, -- int freeit -- ); -- -- char *ldap_err2string( int err ); -- -- The use of the following routines is deprecated. -- -- int ldap_result2error( -- LDAP *ld, -- LDAPMessage *res, -- int freeit -- ); -- -- void ldap_perror( LDAP *ld, char *msg ); -- --Parameters are: -- --ld The session handle. -- --res The result of an LDAP operation as returned by -- -- -- --Expires: January 1998 [Page 31] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- ldap_result() or one of the synchronous API operation -- calls. -- --errcodep This result parameter will be filled in with the LDAP error -- code field from the LDAPResult message. This is the indi- -- cation from the server of the outcome of the operation. -- NULL may be passed to ignore this field. -- --matcheddnp In the case of a return of LDAP_NO_SUCH_OBJECT, this result -- parameter will be filled in with a DN indicating how much -- of the name in the request was recognized. NULL may be -- passed to ignore this field. The matched DN string should -- be freed by calling ldap_memfree() which is described later -- in this document. -- --errmsgp This result parameter will be filled in with the contents -- of the error message field from the LDAPResult message. -- The error message string should be freed by calling -- ldap_memfree() which is described later in this document. -- NULL may be passed to ignore this field. 
-- --referralsp This result parameter will be filled in with the contents -- of the referrals field from the LDAPResult message, indi- -- cating zero or more alternate LDAP servers where the -- request should be retried. The referrals array should be -- freed by calling ldap_value_free() which is described later -- in this document. NULL may be passed to ignore this field. -- --serverctrlsp This result parameter will be filled in with an allocated -- array of controls copied out of the LDAPResult message. -- The control array should be freed by calling -- ldap_controls_free() which was described earlier. -- --freeit A boolean that determines whether the res parameter is -- disposed of or not. Pass any non-zero value to have these -- routines free res after extracting the requested informa- -- tion. This is provided as a convenience; you can also use -- ldap_msgfree() to free the result later. -- --servercredp For SASL bind results, this result parameter will be filled -- in with the credentials passed back by the server for -- mutual authentication, if given. An allocated berval struc- -- ture is returned that should be disposed of by calling -- ldap_ber_free(). NULL may be passed to ignore this field. -- --resultoidp For extended results, this result parameter will be filled -- in with the dotted-OID text representation of the name of -- the extended operation response. This string should be -- -- -- --Expires: January 1998 [Page 32] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- disposed of by calling ldap_memfree(). NULL may be passed -- to ignore this field. -- --resultdatap For extended results, this result parameter will be filled -- in with a pointer to a struct berval containing the data in -- the extended operation response. It should be disposed of -- by calling ldap_ber_free(). NULL may be passed to ignore -- this field. 
-- --err For ldap_err2string(), an LDAP error code, as returned by -- ldap_result2error() or another LDAP API call. -- --Additional parameters for the deprecated routines are not described. --Interested readers are referred to RFC 1823. -- --All of the ldap_parse_*_result() routines skip over messages of type --LDAP_RES_SEARCH_ENTRY and LDAP_RES_SEARCH_REFERENCE when looking for a --result message to parse. They return the constant LDAP_SUCCESS if the --result was successfully parsed and another LDAP error code if not. Note --that the LDAP error code that indicates the outcome of the operation --performed by the server is placed in the errcodep ldap_parse_result() --parameter. -- --ldap_err2string() is used to convert a numeric LDAP error code, as --returned by one of the ldap_parse_*_result() routines, or one of the --synchronous API operation calls, into an informative NULL-terminated --character string message describing the error. It returns a pointer to --static data. -- -- --11. Stepping Through a List of Results -- --The ldap_first_message() and ldap_next_message() routines are used to --step through the list of messages in a result chain returned by --ldap_result(). For search operations, the result chain may actually --include referral messages, entry messages, and result messages. --ldap_count_messages() is used to count the number of messages returned. --The ldap_msgtype() function, described above, can be used to distinguish --between the different message types. -- -- LDAPMessage *ldap_first_message( LDAP *ld, LDAPMessage *res ); -- -- LDAPMessage *ldap_next_message( LDAP *ld, LDAPMessage *msg ); -- -- int ldap_count_messages( LDAP *ld, LDAPMessage *res ); -- --Parameters are: -- -- -- -- --Expires: January 1998 [Page 33] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --ld The session handle. -- --res The result chain, as obtained by a call to one of the synchronous -- search routines or ldap_result(). 
-- --msg The message returned by a previous call to ldap_first_message() -- or ldap_next_message(). -- --ldap_first_message() and ldap_next_message() will return NULL when no --more messages exist in the result set to be returned. NULL is also --returned if an error occurs while stepping through the entries, in which --case the error parameters in the session handle ld will be set to indi- --cate the error. -- --ldap_count_messages() returns the number of messages contained in a --chain of results. It can also be used to count the number of messages --that remain in a chain if called with a message, entry, or reference --returned by ldap_first_message(), ldap_next_message(), --ldap_first_entry(), ldap_next_entry(), ldap_first_reference(), --ldap_next_reference(). -- -- --12. Parsing Search Results -- --The following calls are used to parse the entries and references --returned by ldap_search() and friends. These results are returned in an --opaque structure that should only be accessed by calling the routines --described below. Routines are provided to step through the entries and --references returned, step through the attributes of an entry, retrieve --the name of an entry, and retrieve the values associated with a given --attribute in an entry. -- -- --12.1. Stepping Through a List of Entries -- --The ldap_first_entry() and ldap_next_entry() routines are used to step --through and retrieve the list of entries from a search result chain. --The ldap_first_reference() and ldap_next_reference() routines are used --to step through and retrieve the list of continuation references from a --search result chain. ldap_count_entries() is used to count the number --of entries returned. ldap_count_references() is used to count the number --of references returned. 
-- -- LDAPMessage *ldap_first_entry( LDAP *ld, LDAPMessage *res ); -- -- LDAPMessage *ldap_next_entry( LDAP *ld, LDAPMessage *entry ); -- -- LDAPMessage *ldap_first_reference( LDAP *ld, LDAPMessage *res ); -- -- -- --Expires: January 1998 [Page 34] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- LDAPMessage *ldap_next_reference( LDAP *ld, LDAPMessage *ref ); -- -- int ldap_count_entries( LDAP *ld, LDAPMessage *res ); -- -- int ldap_count_references( LDAP *ld, LDAPMessage *res ); -- --Parameters are: -- --ld The session handle. -- --res The search result, as obtained by a call to one of the synchro- -- nous search routines or ldap_result(). -- --entry The entry returned by a previous call to ldap_first_entry() or -- ldap_next_entry(). -- --ldap_first_entry() and ldap_next_entry() will return NULL when no more --entries or references exist in the result set to be returned. NULL is --also returned if an error occurs while stepping through the entries, in --which case the error parameters in the session handle ld will be set to --indicate the error. -- --ldap_count_entries() returns the number of entries contained in a chain --of entries. It can also be used to count the number of entries that --remain in a chain if called with a message, entry or reference returned --by ldap_first_message(), ldap_next_message(), ldap_first_entry(), --ldap_next_entry(), ldap_first_reference(), ldap_next_reference(). -- --ldap_count_references() returns the number of references contained in a --chain of search results. It can also be used to count the number of --references that remain in a chain. -- -- --12.2. Stepping Through the Attributes of an Entry -- --The ldap_first_attribute() and ldap_next_attribute() calls are used to --step through the list of attribute types returned with an entry. 
-- -- char *ldap_first_attribute( -- LDAP *ld, -- LDAPMessage *entry, -- BerElement **ptr -- ); -- -- char *ldap_next_attribute( -- LDAP *ld, -- LDAPMessage *entry, -- BerElement *ptr -- -- -- --Expires: January 1998 [Page 35] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- ); -- -- void ldap_memfree( char *mem ); -- --Parameters are: -- --ld The session handle. -- --entry The entry whose attributes are to be stepped through, as returned -- by ldap_first_entry() or ldap_next_entry(). -- --ptr In ldap_first_attribute(), the address of a pointer used inter- -- nally to keep track of the current position in the entry. In -- ldap_next_attribute(), the pointer returned by a previous call to -- ldap_first_attribute(). -- --mem A pointer to memory allocated by the LDAP library, such as the -- attribute names returned by ldap_first_attribute() and -- ldap_next_attribute, or the DN returned by ldap_get_dn(). -- --ldap_first_attribute() and ldap_next_attribute() will return NULL when --the end of the attributes is reached, or if there is an error, in which --case the error parameters in the session handle ld will be set to indi- --cate the error. -- --Both routines return a pointer to an allocated buffer containing the --current attribute name. This should be freed when no longer in use by --calling ldap_memfree(). -- --ldap_first_attribute() will allocate and return in ptr a pointer to a --BerElement used to keep track of the current position. This pointer --should be passed in subsequent calls to ldap_next_attribute() to step --through the entry's attributes. After a set of calls to --ldap_first_attribute() and ldap_next_attibute(), if ptr is non-NULL, it --should be freed by calling ldap_ber_free( ptr, 0 ). Note that it is very --important to pass the second parameter as 0 (zero) in this call. -- --The attribute names returned are suitable for passing in a call to --ldap_get_values() and friends to retrieve the associated values. 
-- -- --12.3. Retrieving the Values of an Attribute -- --ldap_get_values() and ldap_get_values_len() are used to retrieve the --values of a given attribute from an entry. ldap_count_values() and --ldap_count_values_len() are used to count the returned values. --ldap_value_free() and ldap_value_free_len() are used to free the values. -- -- -- -- --Expires: January 1998 [Page 36] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- char **ldap_get_values( -- LDAP *ld, -- LDAPMessage *entry, -- char *attr -- ); -- -- struct berval **ldap_get_values_len( -- LDAP *ld, -- LDAPMessage *entry, -- char *attr -- ); -- -- int ldap_count_values( char **vals ); -- -- int ldap_count_values_len( struct berval **vals ); -- -- int ldap_value_free( char **vals ); -- -- int ldap_value_free_len( struct berval **vals ); -- --Parameters are: -- --ld The session handle. -- --entry The entry from which to retrieve values, as returned by -- ldap_first_entry() or ldap_next_entry(). -- --attr The attribute whose values are to be retrieved, as returned by -- ldap_first_attribute() or ldap_next_attribute(), or a caller- -- supplied string (e.g., "mail"). -- --vals The values returned by a previous call to ldap_get_values() or -- ldap_get_values_len(). -- --Two forms of the various calls are provided. The first form is only --suitable for use with non-binary character string data. The second _len --form is used with any kind of data. -- --Note that the values returned are dynamically allocated and should be --freed by calling either ldap_value_free() or ldap_value_free_len() when --no longer in use. -- -- --12.4. Retrieving the name of an entry -- --ldap_get_dn() is used to retrieve the name of an entry. --ldap_explode_dn() and ldap_explode_rdn() are used to break up a name --into its component parts. 
ldap_dn2ufn() is used to convert the name into -- -- -- --Expires: January 1998 [Page 37] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --a more "user friendly" format. -- -- char *ldap_get_dn( LDAP *ld, LDAPMessage *entry ); -- -- char **ldap_explode_dn( char *dn, int notypes ); -- -- char **ldap_explode_rdn( char *rdn, int notypes ); -- -- char *ldap_dn2ufn( char *dn ); -- --Parameters are: -- --ld The session handle. -- --entry The entry whose name is to be retrieved, as returned by -- ldap_first_entry() or ldap_next_entry(). -- --dn The dn to explode, such as returned by ldap_get_dn(). -- --rdn The rdn to explode, such as returned in the components of the -- array returned by ldap_explode_dn(). -- --notypes A boolean parameter, if non-zero indicating that the dn or rdn -- components should have their type information stripped off -- (i.e., "cn=Babs" would become "Babs"). -- --ldap_get_dn() will return NULL if there is some error parsing the dn, --setting error parameters in the session handle ld to indicate the error. --It returns a pointer to malloc'ed space that the caller should free by --calling ldap_memfree() when it is no longer in use. Note the format of --the DNs returned is given by [4]. -- --ldap_explode_dn() returns a NULL-terminated char * array containing the --RDN components of the DN supplied, with or without types as indicated by --the notypes parameter. The array returned should be freed when it is no --longer in use by calling ldap_value_free(). -- --ldap_explode_rdn() returns a NULL-terminated char * array containing the --components of the RDN supplied, with or without types as indicated by --the notypes parameter. The array returned should be freed when it is no --longer in use by calling ldap_value_free(). -- --ldap_dn2ufn() converts the DN into the user friendly format described in --[5]. The UFN returned is malloc'ed space that should be freed by a call --to ldap_memfree() when no longer in use. 
-- -- -- -- -- -- --Expires: January 1998 [Page 38] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --13. Encoded ASN.1 Value Manipulation -- --This section describes routines which may be used to encode and decode --BER-encoded ASN.1 values, which are often used inside of control and --extension values. -- --With the exceptions of two new functions ber_flatten() and ber_init(), --these functions are compatible with the University of Michigan LDAP 3.3 --implementation of BER. -- -- --13.1. General -- -- struct berval { -- unsigned long bv_len; -- char *bv_val; -- }; -- --A struct berval contains a sequence of bytes and an indication of its --length. The bv_val is not null terminated. bv_len must always be a --nonnegative number. Applications may allocate their own berval struc- --tures. -- -- typedef struct berelement { -- /* opaque */ -- } BerElement; -- --The BerElement structure contains not only a copy of the encoded value, --but also state information used in encoding or decoding. Applications --cannot allocate their own BerElement structures. The internal state is --neither thread-specific nor locked, so two threads should not manipulate --the same BerElement value simultaneously. -- --A single BerElement value cannot be used for both encoding and decoding. -- -- void ber_bvfree ( struct berval *bv); -- --ber_bvfree() frees a berval returned from this API. Both the bv->bv_val --string and the berval itself are freed. Applications should not use --ber_bvfree() with bervals which the application has allocated. -- -- void ber_bvecfree ( struct berval **bv ); -- --ber_bvecfree() frees an array of bervals returned from this API. Each --of the bervals in the array are freed using ber_bvfree(), then the array --itself is freed. -- -- struct berval *ber_bvdup (struct berval *bv ); -- -- -- --Expires: January 1998 [Page 39] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --ber_bvdup() returns a copy of a berval. 
The bv_val field in the --returned berval points to a different area of memory as the bv_val field --in the argument berval. The null pointer is returned on error (e.g. out --of memory). -- -- void ber_free ( BerElement *ber, int fbuf ); -- --ber_free() frees a BerElement which is returned from the API calls --ber_alloc_t() or ber_init(). Each BerElement must be freed by the --caller. The second argument fbuf should always be set to 1. -- -- --13.2. Encoding -- -- BerElement *ber_alloc_t(int options); -- --ber_alloc_t() constructs and returns BerElement. The null pointer is --returned on error. The options field contains a bitwise-or of options --which are to be used when generating the encoding of this BerElement. --One option is defined and must always be supplied: -- -- #define LBER_USE_DER 0x01 -- --When this option is present, lengths will always be encoded in the --minimum number of octets. Note that this option does not cause values --of sets and sequences to be rearranged in tag and byte order, so these --functions are not suitable for generating DER output as defined in X.509 --and X.680. -- --Unrecognized option bits are ignored. -- --The BerElement returned by ber_alloc_t() is initially empty. Calls to --ber_printf() will append bytes to the end of the ber_alloc_t(). -- -- int ber_printf(BerElement *ber, char *fmt, ... ) -- --The ber_printf() routine is used to encode a BER element in much the --same way that sprintf() works. One important difference, though, is --that state information is kept in the ber argument so that multiple --calls can be made to ber_printf() to append to the end of the BER ele- --ment. ber must be a pointer to a BerElement returned by ber_alloc_t(). --ber_printf() interprets and formats its arguments according to the for- --mat string fmt. ber_printf() returns -1 if there is an error during --encoding. As with sprintf(), each character in fmt refers to an argu- --ment to ber_printf(). 
-- --The format string can contain the following format characters: -- -- -- -- --Expires: January 1998 [Page 40] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --'t' Tag. The next argument is an int specifying the tag to override -- the next element to be written to the ber. This works across -- calls. The int value must contain the tag class, constructed -- bit, and tag value. The tag value must fit in a single octet -- (tag value is less than 32). For example, a tag of "[3]" for a -- constructed type is 0xA3. -- --'b' Boolean. The next argument is an int, containing either 0 for -- FALSE or 0xff for TRUE. A boolean element is output. If this -- format character is not preceded by the 't' format modifier, the -- tag 0x01 is used for the element. -- --'i' Integer. The next argument is an int, containing the integer in -- the host's byte order. An integer element is output. If this -- format character is not preceded by the 't' format modifier, the -- tag 0x02 is used for the element. -- --'X' Bitstring. The next two arguments are a char * pointer to the -- start of the bitstring, followed by an int containing the number -- of bits in the bitstring. A bitstring element is output, in -- primitive form. If this format character is not preceded by the -- 't' format modifier, the tag 0x03 is used for the element. -- --'n' Null. No argument is required. An ASN.1 NULL element is out- -- put. If this format character is not preceded by the 't' format -- modifier, the tag 0x05 is used for the element. -- --'o' Octet string. The next two arguments are a char *, followed by -- an int with the length of the string. The string may contain -- null bytes and need not by null-terminated. An octet string -- element is output, in primitive form. If this format character -- is not preceded by the 't' format modifier, the tag 0x04 is used -- for the element. -- --'s' Octet string. The next argument is a char * pointing to a -- null-terminated string. 
An octet string element in primitive -- form is output, which does not include the trailing ' ' byte. If -- this format character is not preceded by the 't' format modif- -- ier, the tag 0x04 is used for the element. -- --'v' Several octet strings. The next argument is a char **, an array -- of char * pointers to null-terminated strings. The last element -- in the array must be a null pointer. The octet strings do not -- include the trailing SEQUENCE OF octet strings. The 't' format -- modifier cannot be used with this format character. -- --'V' Several octet strings. A null-terminated array of berval *'s is -- supplied. Note that a construct like '{V}' is required to get an -- -- -- --Expires: January 1998 [Page 41] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- actual SEQUENCE OF octet strings. The 't' format modifier cannot -- be used with this format character. -- --'{' Begin sequence. No argument is required. If this format char- -- acter is not preceded by the 't' format modifier, the tag 0x30 -- is used. -- --'}' End sequence. No argument is required. The 't' format modifier -- cannot be used with this format character. -- --'[' Begin set. No argument is required. If this format character -- is not preceded by the 't' format modifier, the tag 0x31 is -- used. -- --']' End set. No argument is required. The 't' format modifier can- -- not be used with this format character. -- --Each use of a '{' format character must be matched by a '}' character, --either later in the format string, or in the format string of a subse- --quent call to ber_printf() for that BerElement. The same applies to the --'[' and -- --Sequences and sets nest, and implementations of this API must maintain --internal state to be able to properly calculate the lengths. -- -- int ber_flatten (BerElement *ber, struct berval **bvPtr); -- --The ber_flatten routine allocates a struct berval whose contents are a --BER encoding taken from the ber argument. 
The bvPtr pointer points to --the returned berval, which must be freed using ber_bvfree(). This rou- --tine returns 0 on success and -1 on error. -- --The ber_flatten API call is not present in U-M LDAP 3.3. -- --The use of ber_flatten on a BerElement in which all '{' and '}' format --modifiers have not been properly matched can result in a berval whose --contents are not a valid BER encoding. -- -- --13.3. Encoding Example -- --The following is an example of encoding the following ASN.1 data type: -- -- Example1Request ::= SEQUENCE { -- s OCTET STRING, -- must be printable -- val1 INTEGER, -- val2 [0] INTEGER DEFAULT 0 -- } -- -- -- --Expires: January 1998 [Page 42] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- int encode_example1(char *s,int val1,int val2,struct berval **bvPtr) -- { -- BerElement *ber; -- int rc; -- -- ber = ber_alloc_t(LBER_USE_DER); -- -- if (ber == NULL) return -1; -- -- if (ber_printf(ber,"{si",s,val1) == -1) { -- ber_free(ber,1); -- return -1; -- } -- -- if (val2 != 0) { -- if (ber_printf(ber,"ti",0x80,val2) == -1) { -- ber_free(ber,1); -- return -1; -- } -- } -- -- if (ber_printf(ber,"}") == -1) { -- ber_free(ber,1); -- return -1; -- } -- -- rc = ber_flatten(ber,bvPtr); -- ber_free(ber,1); -- return -1; -- } -- -- --13.4. Decoding -- --The following two symbols are available to applications. -- -- #define LBER_ERROR 0xffffffffL -- #define LBER_DEFAULT 0xffffffffL -- -- BerElement *ber_init (struct berval *bv); -- --The ber_init functions construct BerElement and returns a new BerElement --containing a copy of the data in the bv argument. ber_init returns the --null pointer on error. -- -- unsigned long ber_scanf (BerElement *ber, char *fmt, ... ); -- --The ber_scanf() routine is used to decode a BER element in much the same -- -- -- --Expires: January 1998 [Page 43] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --way that sscanf() works. 
One important difference, though, is that some --state information is kept with the ber argument so that multiple calls --can be made to ber_scanf() to sequentially read from the BER element. --The ber argument must be a pointer to a BerElement returned by --ber_init(). ber_scanf interprets the bytes according to the format --string fmt, and stores the results in its additional arguments. --ber_scanf() returns LBER_ERROR on error, and a nonnegative number on --success. -- --The format string contains conversion specifications which are used to --direct the interpretation of the BER element. The format string can --contain the following characters: -- --'a' Octet string. A char ** argument should be supplied. Memory is -- allocated, filled with the contents of the octet string, null- -- terminated, and the pointer to the string is stored in the argu- -- ment. The returned value must be freed using ldap_memfree. The -- tag of the element must indicate the primitive form (constructed -- strings are not supported) but is otherwise ignored and dis- -- carded during the decoding. This format cannot be used with -- octet strings which could contain null bytes. -- --'O' Octet string. A struct berval ** argument should be supplied, -- which upon return points to a allocated struct berval containing -- the octet string and its length. ber_bvfree() must be called to -- free the allocated memory. The tag of the element must indicate -- the primitive form (constructed strings are not supported) but -- is otherwise ignored during the decoding. -- --'b' Boolean. A pointer to an int should be supplied. The int value -- stored will be 0 for FALSE or nonzero for TRUE. The tag of the -- element must indicate the primitive form but is otherwise -- ignored during the decoding. -- --'i' Integer. A pointer to an int should be supplied. The int value -- stored will be in host byte order. 
The tag of the element must -- indicate the primitive form but is otherwise ignored during the -- decoding. ber_scanf() will return an error if the integer can- -- not be stored in an int. -- --'B' Bitstring. A char ** argument should be supplied which will -- point to the allocated bits, followed by an unsigned long * -- argument, which will point to the length (in bits) of the bit- -- string returned. ldap_memfree must be called to free the bit- -- string. The tag of the element must indicate the primitive form -- (constructed bitstrings are not supported) but is otherwise -- ignored during the decoding. -- -- -- -- --Expires: January 1998 [Page 44] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --'n' Null. No argument is required. The element is simply skipped -- if it is recognized as a zero-length element. The tag is -- ignored. -- --'v' Several octet strings. A char *** argument should be supplied, -- which upon return points to a allocated null-terminated array of -- char *'s containing the octet strings. NULL is stored if the -- sequence is empty. ldap_memfree must be called to free each -- element of the array and the array itself. The tag of the -- sequence and of the octet strings are ignored. -- --'V' Several octet strings (which could contain null bytes). A -- struct berval *** should be supplied, which upon return points -- to a allocated null-terminated array of struct berval *'s con- -- taining the octet strings and their lengths. NULL is stored if -- the sequence is empty. ber_bvecfree() can be called to free the -- allocated memory. The tag of the sequence and of the octet -- strings are ignored. -- --'x' Skip element. The next element is skipped. No argument is -- required. -- --'{' Begin sequence. No argument is required. The initial sequence -- tag and length are skipped. -- --'}' End sequence. No argument is required. -- --'[' Begin set. No argument is required. The initial set tag and -- length are skipped. 
-- --']' End set. No argument is required. -- -- unsigned long ber_peek_tag (BerElement *ber, unsigned long *lenPtr); -- --ber_peek_tag() returns the tag of the next element to be parsed in the --BerElement argument. The length of this element is stored in the --*lenPtr argument. LBER_DEFAULT is returned if there is no further data --to be read. The ber argument is not modified. -- -- unsigned long ber_skip_tag (BerElement *ber, unsigned long *lenPtr); -- --ber_skip_tag() is similar to ber_peek_tag(), except that the state --pointer in the BerElement argument is advanced past the first tag and --length, and is pointed to the value part of the next element. This rou- --tine should only be used with constructed types and situations when a --BER encoding is used as the value of an OCTET STRING. The length of the --value is stored in *lenPtr. -- -- -- -- --Expires: January 1998 [Page 45] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- unsigned long ber_first_element(BerElement *ber, -- unsigned long *lenPtr, char **opaquePtr); -- -- unsigned long ber_next_element (BerElement *ber, -- unsigned long *lenPtr, char *opaque); -- --ber_first_element() and ber_next_element() are used to traverse a SET, --SET OF, SEQUENCE or SEQUENCE OF data value. ber_first_element() calls --ber_skip_tag(), stores internal information in *lenPtr and *opaquePtr, --and calls ber_peek_tag() for the first element inside the constructed --value. LBER_DEFAULT is returned if the constructed value is empty. --ber_next_element() positions the state at the start of the next element --in the constructed type. LBER_DEFAULT is returned if there are no --further values. -- --The len and opaque values should not be used by applications other than --as arguments to ber_next_element(), as shown in the example below. -- -- --13.5. 
Decoding Example -- --The following is an example of decoding an ASN.1 data type: -- -- Example2Request ::= SEQUENCE { -- dn OCTET STRING, -- must be printable -- scope ENUMERATED { b (0), s (1), w (2) }, -- ali ENUMERATED { n (0), s (1), f (2), a (3) }, -- size INTEGER, -- time INTEGER, -- tonly BOOLEAN, -- attrs SEQUENCE OF OCTET STRING, -- must be printable -- [0] SEQUENCE OF SEQUENCE { -- type OCTET STRING -- must be printable, -- crit BOOLEAN DEFAULT FALSE, -- value OCTET STRING -- } OPTIONAL } -- -- #define LDAP_TAG_CONTROL_LIST 0xA0L /* context specific cons 0 */ -- -- int decode_example2(struct berval *bv) -- { -- BerElement *ber; -- unsigned long len; -- int scope, ali, size, time, tonly; -- char *dn = NULL, **attrs = NULL; -- int res,i,rc = 0; -- -- ber = ber_init(bv); -- -- -- --Expires: January 1998 [Page 46] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- if (ber == NULL) { -- printf("ERROR ber_init failed0); -- return -1; -- } -- -- res = ber_scanf(ber,"{aiiiiib{v}",&dn,&scope,&ali, -- &size,&time,&tonly,&attrs); -- -- if (res == -1) { -- printf("ERROR ber_scanf failed0); -- ber_free(ber,1); -- return -1; -- } -- -- /* *** use dn */ -- ldap_memfree(dn); -- -- for (i = 0; attrs != NULL && attrs[i] != NULL; i++) { -- /* *** use attrs[i] */ -- ldap_memfree(attrs[i]); -- } -- ldap_memfree(attrs); -- -- if (ber_peek_tag(ber,&len) == LDAP_TAG_CONTROL_LIST) { -- char *opaque; -- unsigned long tag; -- -- for (tag = ber_first_element(ber,&len,&opaque); -- tag != LBER_DEFAULT; -- tag = ber_next_element (ber,&len,opaque)) { -- -- unsigned long ttag, tlen; -- char *type; -- int crit; -- struct berval *value; -- -- if (ber_scanf(ber,"{a",&type) == LBER_ERROR) { -- printf("ERROR cannot parse type0); -- break; -- } -- /* *** use type */ -- ldap_memfree(type); -- -- ttag = ber_peek_tag(ber,&tlen); -- if (ttag == 0x01) { /* boolean */ -- if (ber_scanf(ber,"b", -- &crit) == LBER_ERROR) { -- printf("ERROR cannot parse crit0); -- -- -- 
--Expires: January 1998 [Page 47] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- rc = -1; -- break; -- } -- } else if (ttag == 0x04) { /* octet string */ -- crit = 0; -- } else { -- printf("ERROR extra field in controls0); -- break; -- } -- -- if (ber_scanf(ber,"O}",&value) == LBER_ERROR) { -- printf("ERROR cannot parse value0); -- rc = -1; -- break; -- } -- /* *** use value */ -- ldap_bvfree(value); -- } -- } -- -- ber_scanf(ber,"}"); -- -- ber_free(ber,1); -- -- return rc; -- } -- -- -- --14. Security Considerations -- --LDAPv2 supports security through protocol-level authentication using --clear-text passwords. LDAPv3 adds support for SASL [8] (Simple Authen- --tication Security Layer) methods. LDAPv3 also supports operation over a --secure transport layer using Transport Layer Security TLS [8]. Readers --are referred to the protocol documents for discussion of related secu- --rity considerations. -- --Implementations of this API should be cautious when handling authentica- --tion credentials. In particular, keeping long-lived copies of creden- --tials without the application's knowledge is discouraged. -- -- --15. Acknowledgements -- --Many members of the IETF ASID working group as well as members of the --Internet at large have provided useful comments and suggestions that --have been incorporated into this revision. -- -- -- --Expires: January 1998 [Page 48] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --This original material upon which this revision is based was based upon --work supported by the National Science Foundation under Grant No. NCR- --9416667. -- -- --16. Bibliography -- --[1] The Directory: Selected Attribute Syntaxes. CCITT, Recommendation -- X.520. -- --[2] M. Wahl, A. Coulbeck, T. Howes, S. Kille, W. Yeong, C. Robbins, -- "Lightweight Directory Access Protocol Attribute Syntax Defini- -- tions", INTERNET-DRAFT , -- 11 July 1997. -- --[3] T. 
Howes, "A String Representation of LDAP Search Filters," -- INTERNET-DRAFT , May 1997. -- --[4] S. Kille, M. Wahl, "A UTF-8 String Representation of Distinguished -- Names", INTERNET-DRAFT , 29 April -- 1997. -- --[5] S. Kille, "Using the OSI Directory to Achieve User Friendly Nam- -- ing," RFC 1781, March 1995. -- --[6] M. Wahl, T. Howes, S. Kille, "Lightweight Directory Access Protocol -- (v3)", INTERNET-DRAFT , 11 -- July 1997. -- --[7] A. Herron, T. Howes, M. Wahl, "LDAP Control Extension for Server -- Side Sorting of Search Result," INTERNET-DRAFT , 16 April 1997. -- --[8] J. Meyers, "Simple Authentication and Security Layer", INTERNET- -- DRAFT , April 1997. -- --[9] "Lightweight Directory Access Protocol (v3) Extension for Transport -- Layer Security", INTERNET-DRAFT , June 1997. -- --[10] "UTF-8, a transformation format of Unicode and ISO 10646", RFC -- 2044, October 1996. -- --[11] "IP Version 6 Addressing Architecture,", RFC 1884, December 1995. -- -- -- -- -- -- -- --Expires: January 1998 [Page 49] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --17. Author's Addresses -- -- Tim Howes -- Netscape Communications Corp. -- 501 E. Middlefield Rd., Mailstop MV068 -- Mountain View, CA 94043 -- USA -- +1 415 937-3419 -- howes@netscape.com -- -- -- Mark Smith -- Netscape Communications Corp. -- 501 E. Middlefield Rd., Mailstop MV068 -- Mountain View, CA 94043 -- USA -- +1 415 937-3477 -- mcs@netscape.com -- -- Andy Herron -- Microsoft Corp. -- 1 Microsoft Way -- Redmond, WA 98052 -- USA -- +1 425 882-8080 -- andyhe@microsoft.com -- -- Chris Weider -- Microsoft Corp. -- 1 Microsoft Way -- Redmond, WA 98052 -- USA -- +1 425 882-8080 -- cweider@microsoft.com -- -- Mark Wahl -- Critical Angle Inc. -- 4815 W Braker Lane #502-385 -- Austin, TX 78759 -- USA -- M.Wahl@critical-angle.com -- -- --18. 
Appendix A - Sample LDAP API Code -- -- #include -- -- main() -- -- -- --Expires: January 1998 [Page 50] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- { -- LDAP *ld; -- LDAPMessage *res, *e; -- int i; -- char *a, *dn; -- BerElement *ptr; -- char **vals; -- -- /* open an LDAP session */ -- if ( (ld = ldap_init( "dotted.host.name", LDAP_PORT )) == NULL ) -- exit( 1 ); -- -- /* authenticate as nobody */ -- if ( ldap_simple_bind_s( ld, NULL, NULL ) != LDAP_SUCCESS ) { -- ldap_perror( ld, "ldap_simple_bind_s" ); -- exit( 1 ); -- } -- -- /* search for entries with cn of "Babs Jensen", return all attrs */ -- if ( ldap_search_s( ld, "o=University of Michigan, c=US", -- LDAP_SCOPE_SUBTREE, "(cn=Babs Jensen)", NULL, 0, &res ) -- != LDAP_SUCCESS ) { -- ldap_perror( ld, "ldap_search_s" ); -- exit( 1 ); -- } -- -- /* step through each entry returned */ -- for ( e = ldap_first_entry( ld, res ); e != NULL; -- e = ldap_next_entry( ld, e ) ) { -- /* print its name */ -- dn = ldap_get_dn( ld, e ); -- printf( "dn: %s\n", dn ); -- ldap_memfree( dn ); -- -- /* print each attribute */ -- for ( a = ldap_first_attribute( ld, e, &ptr ); a != NULL; -- a = ldap_next_attribute( ld, e, ptr ) ) { -- printf( "attribute: %s\n", a ); -- -- /* print each value */ -- vals = ldap_get_values( ld, e, a ); -- for ( i = 0; vals[i] != NULL; i++ ) { -- printf( "value: %s\n", vals[i] ); -- } -- ldap_value_free( vals ); -- } -- if ( ptr != NULL ) { -- ldap_ber_free( ptr, 0 ); -- -- -- --Expires: January 1998 [Page 51] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- -- } -- } -- /* free the search results */ -- ldap_msgfree( res ); -- -- /* close and free connection resources */ -- ldap_unbind( ld ); -- } -- -- -- --19. Appendix B - Outstanding Issues -- -- --19.1. Support for multithreaded applications -- --In order to support multithreaded applications in a platform-independent --way, some additions to the LDAP API are needed. 
Different implementors --have taken different paths to solve this problem in the past. A common --set of thread-related API calls must be defined so that application --developers are not unduly burdened. These will be added to a future --revision of this specification. -- -- --19.2. Using Transport Layer Security (TLS) -- --The API calls used to support TLS must be specified. They will be added --to a future revision of this specification. -- -- --19.3. Client control for chasing referrals -- --A client control has been defined that can be used to specify on a per- --operation basis whether references and external referrals are automati- --cally chased by the client library. This will be added to a future --revision of this specification. -- -- --19.4. Potential confusion between hostname:port and IPv6 addresses -- --String representations of IPv6 network addresses [11] can contain colon --characters. The ldap_init() call is specified to take strings of the --form "hostname:port" or "ipaddress:port". If IPv6 addresses are used, --the latter could be ambiguous. A future revision of this specification --will resolve this issue. -- -- -- -- -- -- --Expires: January 1998 [Page 52] -- --C LDAP API The C LDAP Application Program Interface 29 July 1997 -- -- --19.5. Need to track SASL API standardization efforts -- --If a standard Simple Authentication and Security Layer API is defined, --it may be necessary to modify the LDAP API to accommodate it. -- -- --19.6. Support for character sets other than UTF-8? -- --Some application developers would prefer to pass string data using a --character set other than UTF-8. This could be accommodated by adding a --new option to ldap_set_option() that supports choosing a character set. --If this feature is added, the number of different character sets sup- --ported should definitely be minimized. -- -- --19.7. 
Use of UTF-8 with LDAPv2 servers -- --Strings are always passed as UTF-8 in this API but LDAP version 2 --servers do not support the full range of UTF-8 characters. The expected --behavior of this API when using LDAP version 2 with unsupported charac- --ters should be specified. -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --Expires: January 1998 [Page 53] -- -- -- --1. Status of this Memo............................................1 --2. Introduction...................................................1 --3. Overview of the LDAP Model.....................................2 --4. Overview of LDAP API Use.......................................3 --5. Common Data Structures.........................................4 --6. LDAP Error Codes...............................................5 --7. Performing LDAP Operations.....................................6 --7.1. Initializing an LDAP Session................................6 --7.2. LDAP Session Handle Options.................................7 --7.3. Working with controls.......................................10 --7.4. Authenticating to the directory.............................11 --7.5. Closing the session.........................................13 --7.6. Searching...................................................13 --7.7. Reading an Entry............................................17 --7.8. Listing the Children of an Entry............................17 --7.9. Comparing a Value Against an Entry..........................17 --7.10. Modifying an entry..........................................19 --7.11. Modifying the Name of an Entry..............................21 --7.12. Adding an entry.............................................23 --7.13. Deleting an entry...........................................25 --7.14. Extended Operations.........................................26 --8. Abandoning An Operation........................................28 --9. 
Obtaining Results and Peeking Inside LDAP Messages.............29 --10. Handling Errors and Parsing Results............................31 --11. Stepping Through a List of Results.............................33 --12. Parsing Search Results.........................................34 --12.1. Stepping Through a List of Entries..........................34 --12.2. Stepping Through the Attributes of an Entry.................35 --12.3. Retrieving the Values of an Attribute.......................36 --12.4. Retrieving the name of an entry.............................37 --13. Encoded ASN.1 Value Manipulation...............................39 --13.1. General.....................................................39 --13.2. Encoding....................................................40 --13.3. Encoding Example............................................42 --13.4. Decoding....................................................43 --13.5. Decoding Example............................................46 --14. Security Considerations........................................48 --15. Acknowledgements...............................................48 --16. Bibliography...................................................49 --17. Author's Addresses.............................................50 --18. Appendix A - Sample LDAP API Code..............................50 --19. Appendix B - Outstanding Issues................................52 --19.1. Support for multithreaded applications......................52 --19.2. Using Transport Layer Security (TLS)........................52 --19.3. Client control for chasing referrals........................52 --19.4. Potential confusion between hostname:port and IPv6 addresses52 --19.5. Need to track SASL API standardization efforts..............53 --19.6. Support for character sets other than UTF-8?................53 --19.7. 
Use of UTF-8 with LDAPv2 servers............................53 -- -- -- -- -- -- -- -- diff --git a/rabbitmq-server/plugins-src/generate_app b/rabbitmq-server/plugins-src/generate_app deleted file mode 100644 index fb0eb1e..0000000 --- a/rabbitmq-server/plugins-src/generate_app +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -main([InFile, OutFile | SrcDirs]) -> - Modules = [list_to_atom(filename:basename(F, ".erl")) || - SrcDir <- SrcDirs, - F <- filelib:wildcard("*.erl", SrcDir)], - {ok, [{application, Application, Properties}]} = file:consult(InFile), - NewProperties = - case proplists:get_value(modules, Properties) of - [] -> lists:keyreplace(modules, 1, Properties, {modules, Modules}); - _ -> Properties - end, - file:write_file( - OutFile, - io_lib:format("~p.~n", [{application, Application, NewProperties}])). diff --git a/rabbitmq-server/plugins-src/generate_deps b/rabbitmq-server/plugins-src/generate_deps deleted file mode 100644 index 9f8485b..0000000 --- a/rabbitmq-server/plugins-src/generate_deps +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- --mode(compile). - -%% We expect the list of Erlang source and header files to arrive on -%% stdin, with the entries colon-separated. 
-main([TargetFile, EbinDir]) -> - ErlsAndHrls = [ string:strip(S,left) || - S <- string:tokens(io:get_line(""), ":\n")], - ErlFiles = [F || F <- ErlsAndHrls, lists:suffix(".erl", F)], - Modules = sets:from_list( - [list_to_atom(filename:basename(FileName, ".erl")) || - FileName <- ErlFiles]), - HrlFiles = [F || F <- ErlsAndHrls, lists:suffix(".hrl", F)], - IncludeDirs = lists:usort([filename:dirname(Path) || Path <- HrlFiles]), - Headers = sets:from_list(HrlFiles), - Deps = lists:foldl( - fun (Path, Deps1) -> - dict:store(Path, detect_deps(IncludeDirs, EbinDir, - Modules, Headers, Path), - Deps1) - end, dict:new(), ErlFiles), - {ok, Hdl} = file:open(TargetFile, [write, delayed_write]), - dict:fold( - fun (_Path, [], ok) -> - ok; - (Path, Dep, ok) -> - Module = filename:basename(Path, ".erl"), - ok = file:write(Hdl, [EbinDir, "/", Module, ".beam: ", - Path]), - ok = sets:fold(fun (E, ok) -> file:write(Hdl, [" ", E]) end, - ok, Dep), - file:write(Hdl, ["\n"]) - end, ok, Deps), - ok = file:write(Hdl, [TargetFile, ": ", escript:script_name(), "\n"]), - ok = file:sync(Hdl), - ok = file:close(Hdl). - -detect_deps(IncludeDirs, EbinDir, Modules, Headers, Path) -> - {ok, Forms} = epp:parse_file(Path, IncludeDirs, [{use_specs, true}]), - lists:foldl( - fun ({attribute, _Line, Attribute, Behaviour}, Deps) - when Attribute =:= behaviour orelse Attribute =:= behavior -> - maybe_add_to_deps(EbinDir, Modules, Behaviour, Deps); - ({attribute, _Line, compile, {parse_transform, Transform}}, Deps) -> - maybe_add_to_deps(EbinDir, Modules, Transform, Deps); - ({attribute, _Line, file, {FileName, _LineNumber1}}, Deps) -> - case sets:is_element(FileName, Headers) of - true -> sets:add_element(FileName, Deps); - false -> Deps - end; - (_Form, Deps) -> - Deps - end, sets:new(), Forms). - -maybe_add_to_deps(EbinDir, Modules, Module, Deps) -> - case sets:is_element(Module, Modules) of - true -> sets:add_element( - [EbinDir, "/", atom_to_list(Module), ".beam"], Deps); - false -> Deps - end. 
diff --git a/rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Mochi b/rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Mochi deleted file mode 100644 index c85b65a..0000000 --- a/rabbitmq-server/plugins-src/licensing/LICENSE-MIT-Mochi +++ /dev/null @@ -1,9 +0,0 @@ -This is the MIT license. - -Copyright (c) 2007 Mochi Media, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/rabbitmq-server/plugins-src/licensing/license_info_eldap-wrapper b/rabbitmq-server/plugins-src/licensing/license_info_eldap-wrapper deleted file mode 100644 index 0a0e13c..0000000 --- a/rabbitmq-server/plugins-src/licensing/license_info_eldap-wrapper +++ /dev/null @@ -1,3 +0,0 @@ -Eldap is "Copyright (c) 2010, Torbjorn Tornkvist" and is covered by -the MIT license. 
It was downloaded from https://github.com/etnt/eldap - diff --git a/rabbitmq-server/plugins-src/licensing/license_info_mochiweb-wrapper b/rabbitmq-server/plugins-src/licensing/license_info_mochiweb-wrapper deleted file mode 100644 index c72a6af..0000000 --- a/rabbitmq-server/plugins-src/licensing/license_info_mochiweb-wrapper +++ /dev/null @@ -1,4 +0,0 @@ -Mochiweb is "Copyright (c) 2007 Mochi Media, Inc." and is covered by -the MIT license. It was downloaded from -http://github.com/mochi/mochiweb/ - diff --git a/rabbitmq-server/plugins-src/licensing/license_info_webmachine-wrapper b/rabbitmq-server/plugins-src/licensing/license_info_webmachine-wrapper deleted file mode 100644 index c00fb92..0000000 --- a/rabbitmq-server/plugins-src/licensing/license_info_webmachine-wrapper +++ /dev/null @@ -1,3 +0,0 @@ -Webmachine is Copyright (c) Basho Technologies and is covered by the -Apache License 2.0. It was downloaded from http://webmachine.basho.com/ - diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/.srcdist_done b/rabbitmq-server/plugins-src/mochiweb-wrapper/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/10-build-on-R12B-5.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/10-build-on-R12B-5.patch deleted file mode 100644 index af582a7..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/10-build-on-R12B-5.patch +++ /dev/null @@ -1,303 +0,0 @@ -diff --git a/src/mochiglobal.erl b/src/mochiglobal.erl -index ea645b0..6b20e41 100644 ---- a/src/mochiglobal.erl -+++ b/src/mochiglobal.erl -@@ -6,12 +6,12 @@ - -author("Bob Ippolito "). - -export([get/1, get/2, put/2, delete/1]). - ---spec get(atom()) -> any() | undefined. -+%% -spec get(atom()) -> any() | undefined. - %% @equiv get(K, undefined) - get(K) -> - get(K, undefined). - ---spec get(atom(), T) -> any() | T. -+%% -spec get(atom(), T) -> any() | T. - %% @doc Get the term for K or return Default. 
- get(K, Default) -> - get(K, Default, key_to_module(K)). -@@ -22,7 +22,7 @@ get(_K, Default, Mod) -> - Default - end. - ---spec put(atom(), any()) -> ok. -+%% -spec put(atom(), any()) -> ok. - %% @doc Store term V at K, replaces an existing term if present. - put(K, V) -> - put(K, V, key_to_module(K)). -@@ -33,7 +33,7 @@ put(_K, V, Mod) -> - {module, Mod} = code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin), - ok. - ---spec delete(atom()) -> boolean(). -+%% -spec delete(atom()) -> boolean(). - %% @doc Delete term stored at K, no-op if non-existent. - delete(K) -> - delete(K, key_to_module(K)). -@@ -42,21 +42,21 @@ delete(_K, Mod) -> - code:purge(Mod), - code:delete(Mod). - ---spec key_to_module(atom()) -> atom(). -+%% -spec key_to_module(atom()) -> atom(). - key_to_module(K) -> - list_to_atom("mochiglobal:" ++ atom_to_list(K)). - ---spec compile(atom(), any()) -> binary(). -+%% -spec compile(atom(), any()) -> binary(). - compile(Module, T) -> - {ok, Module, Bin} = compile:forms(forms(Module, T), - [verbose, report_errors]), - Bin. - ---spec forms(atom(), any()) -> [erl_syntax:syntaxTree()]. -+%% -spec forms(atom(), any()) -> [erl_syntax:syntaxTree()]. - forms(Module, T) -> - [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)]. - ---spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()]. -+%% -spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()]. - term_to_abstract(Module, Getter, T) -> - [%% -module(Module). - erl_syntax:attribute( -diff --git a/src/mochiutf8.erl b/src/mochiutf8.erl -index 28f28c1..c9d2751 100644 ---- a/src/mochiutf8.erl -+++ b/src/mochiutf8.erl -@@ -11,11 +11,11 @@ - - %% External API - ---type unichar_low() :: 0..16#d7ff. ---type unichar_high() :: 16#e000..16#10ffff. ---type unichar() :: unichar_low() | unichar_high(). -+%% -type unichar_low() :: 0..16#d7ff. -+%% -type unichar_high() :: 16#e000..16#10ffff. -+%% -type unichar() :: unichar_low() | unichar_high(). 
- ---spec codepoint_to_bytes(unichar()) -> binary(). -+%% -spec codepoint_to_bytes(unichar()) -> binary(). - %% @doc Convert a unicode codepoint to UTF-8 bytes. - codepoint_to_bytes(C) when (C >= 16#00 andalso C =< 16#7f) -> - %% U+0000 - U+007F - 7 bits -@@ -40,12 +40,12 @@ codepoint_to_bytes(C) when (C >= 16#010000 andalso C =< 16#10FFFF) -> - 2#10:2, B1:6, - 2#10:2, B0:6>>. - ---spec codepoints_to_bytes([unichar()]) -> binary(). -+%% -spec codepoints_to_bytes([unichar()]) -> binary(). - %% @doc Convert a list of codepoints to a UTF-8 binary. - codepoints_to_bytes(L) -> - <<<<(codepoint_to_bytes(C))/binary>> || C <- L>>. - ---spec read_codepoint(binary()) -> {unichar(), binary(), binary()}. -+%% -spec read_codepoint(binary()) -> {unichar(), binary(), binary()}. - read_codepoint(Bin = <<2#0:1, C:7, Rest/binary>>) -> - %% U+0000 - U+007F - 7 bits - <> = Bin, -@@ -82,32 +82,32 @@ read_codepoint(Bin = <<2#11110:5, B3:3, - {C, B, Rest} - end. - ---spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _. -+%% -spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _. - codepoint_foldl(F, Acc, <<>>) when is_function(F, 2) -> - Acc; - codepoint_foldl(F, Acc, Bin) -> - {C, _, Rest} = read_codepoint(Bin), - codepoint_foldl(F, F(C, Acc), Rest). - ---spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _. -+%% -spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _. - bytes_foldl(F, Acc, <<>>) when is_function(F, 2) -> - Acc; - bytes_foldl(F, Acc, Bin) -> - {_, B, Rest} = read_codepoint(Bin), - bytes_foldl(F, F(B, Acc), Rest). - ---spec bytes_to_codepoints(binary()) -> [unichar()]. -+%% -spec bytes_to_codepoints(binary()) -> [unichar()]. - bytes_to_codepoints(B) -> - lists:reverse(codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], B)). - ---spec len(binary()) -> non_neg_integer(). -+%% -spec len(binary()) -> non_neg_integer(). - len(<<>>) -> - 0; - len(B) -> - {_, _, Rest} = read_codepoint(B), - 1 + len(Rest). 
- ---spec valid_utf8_bytes(B::binary()) -> binary(). -+%% -spec valid_utf8_bytes(B::binary()) -> binary(). - %% @doc Return only the bytes in B that represent valid UTF-8. Uses - %% the following recursive algorithm: skip one byte if B does not - %% follow UTF-8 syntax (a 1-4 byte encoding of some number), -@@ -118,7 +118,7 @@ valid_utf8_bytes(B) when is_binary(B) -> - - %% Internal API - ---spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary(). -+%% -spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary(). - %% @doc Return B, but skipping the 0-based indexes in L. - binary_skip_bytes(B, []) -> - B; -@@ -126,7 +126,7 @@ binary_skip_bytes(B, L) -> - binary_skip_bytes(B, L, 0, []). - - %% @private ---spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary(). -+%% -spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary(). - binary_skip_bytes(B, [], _N, Acc) -> - iolist_to_binary(lists:reverse([B | Acc])); - binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) -> -@@ -134,13 +134,13 @@ binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) -> - binary_skip_bytes(<>, L, N, Acc) -> - binary_skip_bytes(RestB, L, 1 + N, [C | Acc]). - ---spec invalid_utf8_indexes(binary()) -> [non_neg_integer()]. -+%% -spec invalid_utf8_indexes(binary()) -> [non_neg_integer()]. - %% @doc Return the 0-based indexes in B that are not valid UTF-8. - invalid_utf8_indexes(B) -> - invalid_utf8_indexes(B, 0, []). - - %% @private. ---spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()]. -+%% -spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()]. 
- invalid_utf8_indexes(<>, N, Acc) when C < 16#80 -> - %% U+0000 - U+007F - 7 bits - invalid_utf8_indexes(Rest, 1 + N, Acc); -diff --git a/src/mochiweb_charref.erl b/src/mochiweb_charref.erl -index 193c7c7..665d0f9 100644 ---- a/src/mochiweb_charref.erl -+++ b/src/mochiweb_charref.erl -@@ -11,7 +11,7 @@ - %% codepoint, or return undefined on failure. - %% The input should not include an ampersand or semicolon. - %% charref("#38") = 38, charref("#x26") = 38, charref("amp") = 38. ---spec charref(binary() | string()) -> integer() | [integer()] | undefined. -+%% -spec charref(binary() | string()) -> integer() | [integer()] | undefined. - charref(B) when is_binary(B) -> - charref(binary_to_list(B)); - charref([$#, C | L]) when C =:= $x orelse C =:= $X -> -diff --git a/src/mochiweb_http.erl b/src/mochiweb_http.erl -index 931ecd0..ae6410f 100644 ---- a/src/mochiweb_http.erl -+++ b/src/mochiweb_http.erl -@@ -121,12 +121,12 @@ call_body({M, F}, Req) -> - call_body(Body, Req) -> - Body(Req). - ---spec handle_invalid_request(term()) -> no_return(). -+%% -spec handle_invalid_request(term()) -> no_return(). - handle_invalid_request(Socket) -> - handle_invalid_request(Socket, {'GET', {abs_path, "/"}, {0,9}}, []), - exit(normal). - ---spec handle_invalid_request(term(), term(), term()) -> no_return(). -+%% -spec handle_invalid_request(term(), term(), term()) -> no_return(). - handle_invalid_request(Socket, Request, RevHeaders) -> - Req = new_request(Socket, Request, RevHeaders), - Req:respond({400, [], []}), -diff --git a/src/mochiweb_session.erl b/src/mochiweb_session.erl -index ac5d66b..ddf7c46 100644 ---- a/src/mochiweb_session.erl -+++ b/src/mochiweb_session.erl -@@ -21,11 +21,11 @@ - - %% @doc Generates a secure encrypted binary convining all the parameters. The - %% expiration time must be a 32-bit integer. ---spec generate_session_data( -- ExpirationTime :: expiration_time(), -- Data :: iolist(), -- FSessionKey :: key_fun(), -- ServerKey :: iolist()) -> binary(). 
-+%% -spec generate_session_data( -+%% ExpirationTime :: expiration_time(), -+%% Data :: iolist(), -+%% FSessionKey :: key_fun(), -+%% ServerKey :: iolist()) -> binary(). - generate_session_data(ExpirationTime, Data, FSessionKey, ServerKey) - when is_integer(ExpirationTime), is_function(FSessionKey)-> - BData = ensure_binary(Data), -@@ -39,11 +39,11 @@ generate_session_data(ExpirationTime, Data, FSessionKey, ServerKey) - %% @doc Convenience wrapper for generate_session_data that returns a - %% mochiweb cookie with "id" as the key, a max_age of 20000 seconds, - %% and the current local time as local time. ---spec generate_session_cookie( -- ExpirationTime :: expiration_time(), -- Data :: iolist(), -- FSessionKey :: key_fun(), -- ServerKey :: iolist()) -> header(). -+%% -spec generate_session_cookie( -+%% ExpirationTime :: expiration_time(), -+%% Data :: iolist(), -+%% FSessionKey :: key_fun(), -+%% ServerKey :: iolist()) -> header(). - generate_session_cookie(ExpirationTime, Data, FSessionKey, ServerKey) - when is_integer(ExpirationTime), is_function(FSessionKey)-> - CookieData = generate_session_data(ExpirationTime, Data, -@@ -55,13 +55,13 @@ generate_session_cookie(ExpirationTime, Data, FSessionKey, ServerKey) - calendar:universal_time())}]). - - %% TODO: This return type is messy to express in the type system. ---spec check_session_cookie( -- ECookie :: binary(), -- ExpirationTime :: string(), -- FSessionKey :: key_fun(), -- ServerKey :: iolist()) -> -- {Success :: boolean(), -- ExpTimeAndData :: [integer() | binary()]}. -+%% -spec check_session_cookie( -+ %% ECookie :: binary(), -+ %% ExpirationTime :: string(), -+ %% FSessionKey :: key_fun(), -+ %% ServerKey :: iolist()) -> -+ %% {Success :: boolean(), -+ %% ExpTimeAndData :: [integer() | binary()]}. 
- check_session_cookie(ECookie, ExpirationTime, FSessionKey, ServerKey) - when is_binary(ECookie), is_integer(ExpirationTime), - is_function(FSessionKey) -> -@@ -83,7 +83,7 @@ check_session_cookie(_ECookie, _ExpirationTime, _FSessionKey, _ServerKey) -> - {false, []}. - - %% 'Constant' time =:= operator for binary, to mitigate timing attacks. ---spec eq(binary(), binary()) -> boolean(). -+%% -spec eq(binary(), binary()) -> boolean(). - eq(A, B) when is_binary(A) andalso is_binary(B) -> - eq(A, B, 0). - -@@ -94,27 +94,27 @@ eq(<<>>, <<>>, 0) -> - eq(_As, _Bs, _Acc) -> - false. - ---spec ensure_binary(iolist()) -> binary(). -+%% -spec ensure_binary(iolist()) -> binary(). - ensure_binary(B) when is_binary(B) -> - B; - ensure_binary(L) when is_list(L) -> - iolist_to_binary(L). - ---spec encrypt_data(binary(), binary()) -> binary(). -+%% -spec encrypt_data(binary(), binary()) -> binary(). - encrypt_data(Data, Key) -> - IV = crypto:rand_bytes(16), - Crypt = crypto:aes_cfb_128_encrypt(Key, IV, Data), - <>. - ---spec decrypt_data(binary(), binary()) -> binary(). -+%% -spec decrypt_data(binary(), binary()) -> binary(). - decrypt_data(<>, Key) -> - crypto:aes_cfb_128_decrypt(Key, IV, Crypt). - ---spec gen_key(iolist(), iolist()) -> binary(). -+%% -spec gen_key(iolist(), iolist()) -> binary(). - gen_key(ExpirationTime, ServerKey)-> - crypto:md5_mac(ServerKey, [ExpirationTime]). - ---spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary(). -+%% -spec gen_hmac(iolist(), binary(), iolist(), binary()) -> binary(). - gen_hmac(ExpirationTime, Data, SessionKey, Key) -> - crypto:sha_mac(Key, [ExpirationTime, Data, SessionKey]). 
- diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/20-MAX_RECV_BODY.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/20-MAX_RECV_BODY.patch deleted file mode 100644 index 2656fa2..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/20-MAX_RECV_BODY.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/src/mochiweb_request.erl b/src/mochiweb_request.erl -index 5d89662..6765ab0 100644 ---- a/src/mochiweb_request.erl -+++ b/src/mochiweb_request.erl -@@ -42,7 +42,7 @@ - -define(IDLE_TIMEOUT, 300000). - - % Maximum recv_body() length of 1MB ---define(MAX_RECV_BODY, (1024*1024)). -+-define(MAX_RECV_BODY, 104857600). - - %% @spec get_header_value(K) -> undefined | Value - %% @doc Get the value of a given request header. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/30-remove-crypto-ssl-dependencies.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/30-remove-crypto-ssl-dependencies.patch deleted file mode 100644 index 0d5c85a..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/30-remove-crypto-ssl-dependencies.patch +++ /dev/null @@ -1,104 +0,0 @@ -diff --git a/src/mochitemp.erl b/src/mochitemp.erl -index dda7863..f64876d 100644 ---- a/src/mochitemp.erl -+++ b/src/mochitemp.erl -@@ -1,7 +1,7 @@ - %% @author Bob Ippolito - %% @copyright 2010 Mochi Media, Inc. - --%% @doc Create temporary files and directories. Requires crypto to be started. -+%% @doc Create temporary files and directories. - - -module(mochitemp). - -export([gettempdir/0]). -@@ -87,7 +87,7 @@ rngchars(N) -> - [rngchar() | rngchars(N - 1)]. - - rngchar() -> -- rngchar(crypto:rand_uniform(0, tuple_size(?SAFE_CHARS))). -+ rngchar(mochiweb_util:rand_uniform(0, tuple_size(?SAFE_CHARS))). - - rngchar(C) -> - element(1 + C, ?SAFE_CHARS). -@@ -177,7 +177,6 @@ gettempdir_cwd_test() -> - ok. - - rngchars_test() -> -- crypto:start(), - ?assertEqual( - "", - rngchars(0)), -@@ -199,7 +198,6 @@ rngchar_test() -> - ok. 
- - mkdtemp_n_failonce_test() -> -- crypto:start(), - D = mkdtemp(), - Path = filename:join([D, "testdir"]), - %% Toggle the existence of a dir so that it fails -@@ -246,7 +244,6 @@ make_dir_fail_test() -> - ok. - - mkdtemp_test() -> -- crypto:start(), - D = mkdtemp(), - ?assertEqual( - true, -@@ -257,7 +254,6 @@ mkdtemp_test() -> - ok. - - rmtempdir_test() -> -- crypto:start(), - D1 = mkdtemp(), - ?assertEqual( - true, -diff --git a/src/mochiweb.app.src b/src/mochiweb.app.src -index 8d75a3a..c98d8a0 100644 ---- a/src/mochiweb.app.src -+++ b/src/mochiweb.app.src -@@ -5,5 +5,5 @@ - {modules, []}, - {registered, []}, - {env, []}, -- {applications, [kernel, stdlib, crypto, inets, ssl, xmerl, -+ {applications, [kernel, stdlib, inets, xmerl, - compiler, syntax_tools]}]}. -diff --git a/src/mochiweb_multipart.erl b/src/mochiweb_multipart.erl -index a83a88c..a4857d6 100644 ---- a/src/mochiweb_multipart.erl -+++ b/src/mochiweb_multipart.erl -@@ -38,7 +38,7 @@ parts_to_body([{Start, End, Body}], ContentType, Size) -> - {HeaderList, Body}; - parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) -> - parts_to_multipart_body(BodyList, ContentType, Size, -- mochihex:to_hex(crypto:rand_bytes(8))). -+ mochihex:to_hex(mochiweb_util:rand_bytes(8))). - - %% @spec parts_to_multipart_body([bodypart()], ContentType::string(), - %% Size::integer(), Boundary::string()) -> -diff --git a/src/mochiweb_util.erl b/src/mochiweb_util.erl -index 4d39990..a0bc2bc 100644 ---- a/src/mochiweb_util.erl -+++ b/src/mochiweb_util.erl -@@ -13,7 +13,7 @@ - -export([record_to_proplist/2, record_to_proplist/3]). - -export([safe_relative_path/1, partition/2]). - -export([parse_qvalues/1, pick_accepted_encodings/3]). ---export([make_io/1]). -+-export([make_io/1, rand_bytes/1, rand_uniform/2]). - - -define(PERCENT, 37). % $\% - -define(FULLSTOP, 46). % $\. -@@ -581,6 +581,12 @@ make_io(Integer) when is_integer(Integer) -> - make_io(Io) when is_list(Io); is_binary(Io) -> - Io. 
- -+rand_bytes(Count) -> -+ list_to_binary([rand_uniform(0, 16#FF + 1) || _ <- lists:seq(1, Count)]). -+ -+rand_uniform(Lo, Hi) -> -+ random:uniform(Hi - Lo) + Lo - 1. -+ - %% - %% Tests - %% diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/40-remove-compiler-syntax_tools-dependencies.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/40-remove-compiler-syntax_tools-dependencies.patch deleted file mode 100644 index c9938e5..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/40-remove-compiler-syntax_tools-dependencies.patch +++ /dev/null @@ -1,124 +0,0 @@ -diff --git a/src/mochiglobal.erl b/src/mochiglobal.erl -deleted file mode 100644 -index 6b20e41..0000000 ---- a/src/mochiglobal.erl -+++ /dev/null -@@ -1,107 +0,0 @@ --%% @author Bob Ippolito --%% @copyright 2010 Mochi Media, Inc. --%% @doc Abuse module constant pools as a "read-only shared heap" (since erts 5.6) --%% [1]. ---module(mochiglobal). ---author("Bob Ippolito "). ---export([get/1, get/2, put/2, delete/1]). -- --%% -spec get(atom()) -> any() | undefined. --%% @equiv get(K, undefined) --get(K) -> -- get(K, undefined). -- --%% -spec get(atom(), T) -> any() | T. --%% @doc Get the term for K or return Default. --get(K, Default) -> -- get(K, Default, key_to_module(K)). -- --get(_K, Default, Mod) -> -- try Mod:term() -- catch error:undef -> -- Default -- end. -- --%% -spec put(atom(), any()) -> ok. --%% @doc Store term V at K, replaces an existing term if present. --put(K, V) -> -- put(K, V, key_to_module(K)). -- --put(_K, V, Mod) -> -- Bin = compile(Mod, V), -- code:purge(Mod), -- {module, Mod} = code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin), -- ok. -- --%% -spec delete(atom()) -> boolean(). --%% @doc Delete term stored at K, no-op if non-existent. --delete(K) -> -- delete(K, key_to_module(K)). -- --delete(_K, Mod) -> -- code:purge(Mod), -- code:delete(Mod). -- --%% -spec key_to_module(atom()) -> atom(). --key_to_module(K) -> -- list_to_atom("mochiglobal:" ++ atom_to_list(K)). 
-- --%% -spec compile(atom(), any()) -> binary(). --compile(Module, T) -> -- {ok, Module, Bin} = compile:forms(forms(Module, T), -- [verbose, report_errors]), -- Bin. -- --%% -spec forms(atom(), any()) -> [erl_syntax:syntaxTree()]. --forms(Module, T) -> -- [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)]. -- --%% -spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()]. --term_to_abstract(Module, Getter, T) -> -- [%% -module(Module). -- erl_syntax:attribute( -- erl_syntax:atom(module), -- [erl_syntax:atom(Module)]), -- %% -export([Getter/0]). -- erl_syntax:attribute( -- erl_syntax:atom(export), -- [erl_syntax:list( -- [erl_syntax:arity_qualifier( -- erl_syntax:atom(Getter), -- erl_syntax:integer(0))])]), -- %% Getter() -> T. -- erl_syntax:function( -- erl_syntax:atom(Getter), -- [erl_syntax:clause([], none, [erl_syntax:abstract(T)])])]. -- --%% --%% Tests --%% ---ifdef(TEST). ---include_lib("eunit/include/eunit.hrl"). --get_put_delete_test() -> -- K = '$$test$$mochiglobal', -- delete(K), -- ?assertEqual( -- bar, -- get(K, bar)), -- try -- ?MODULE:put(K, baz), -- ?assertEqual( -- baz, -- get(K, bar)), -- ?MODULE:put(K, wibble), -- ?assertEqual( -- wibble, -- ?MODULE:get(K)) -- after -- delete(K) -- end, -- ?assertEqual( -- bar, -- get(K, bar)), -- ?assertEqual( -- undefined, -- ?MODULE:get(K)), -- ok. ---endif. -diff --git a/src/mochiweb.app.src b/src/mochiweb.app.src -index c98d8a0..4a6808e 100644 ---- a/src/mochiweb.app.src -+++ b/src/mochiweb.app.src -@@ -5,5 +5,4 @@ - {modules, []}, - {registered, []}, - {env, []}, -- {applications, [kernel, stdlib, inets, xmerl, -- compiler, syntax_tools]}]}. -+ {applications, [kernel, stdlib, inets, xmerl]}]}. 
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/50-remove-json.patch b/rabbitmq-server/plugins-src/mochiweb-wrapper/50-remove-json.patch deleted file mode 100644 index 8c7597f..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/50-remove-json.patch +++ /dev/null @@ -1,1255 +0,0 @@ -diff --git a/src/mochijson2.erl b/src/mochijson2.erl -deleted file mode 100644 -index 2b8d16e..0000000 ---- a/src/mochijson2.erl -+++ /dev/null -@@ -1,889 +0,0 @@ --%% @author Bob Ippolito --%% @copyright 2007 Mochi Media, Inc. -- --%% @doc Yet another JSON (RFC 4627) library for Erlang. mochijson2 works --%% with binaries as strings, arrays as lists (without an {array, _}) --%% wrapper and it only knows how to decode UTF-8 (and ASCII). --%% --%% JSON terms are decoded as follows (javascript -> erlang): --%%
    --%%
  • {"key": "value"} -> --%% {struct, [{<<"key">>, <<"value">>}]}
  • --%%
  • ["array", 123, 12.34, true, false, null] -> --%% [<<"array">>, 123, 12.34, true, false, null] --%%
  • --%%
--%%
    --%%
  • Strings in JSON decode to UTF-8 binaries in Erlang
  • --%%
  • Objects decode to {struct, PropList}
  • --%%
  • Numbers decode to integer or float
  • --%%
  • true, false, null decode to their respective terms.
  • --%%
--%% The encoder will accept the same format that the decoder will produce, --%% but will also allow additional cases for leniency: --%%
    --%%
  • atoms other than true, false, null will be considered UTF-8 --%% strings (even as a proplist key) --%%
  • --%%
  • {json, IoList} will insert IoList directly into the output --%% with no validation --%%
  • --%%
  • {array, Array} will be encoded as Array --%% (legacy mochijson style) --%%
  • --%%
  • A non-empty raw proplist will be encoded as an object as long --%% as the first pair does not have an atom key of json, struct, --%% or array --%%
  • --%%
-- ---module(mochijson2). ---author('bob@mochimedia.com'). ---export([encoder/1, encode/1]). ---export([decoder/1, decode/1, decode/2]). -- --%% This is a macro to placate syntax highlighters.. ---define(Q, $\"). ---define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset, -- column=N+S#decoder.column}). ---define(INC_COL(S), S#decoder{offset=1+S#decoder.offset, -- column=1+S#decoder.column}). ---define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset, -- column=1, -- line=1+S#decoder.line}). ---define(INC_CHAR(S, C), -- case C of -- $\n -> -- S#decoder{column=1, -- line=1+S#decoder.line, -- offset=1+S#decoder.offset}; -- _ -> -- S#decoder{column=1+S#decoder.column, -- offset=1+S#decoder.offset} -- end). ---define(IS_WHITESPACE(C), -- (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)). -- --%% @type json_string() = atom | binary() --%% @type json_number() = integer() | float() --%% @type json_array() = [json_term()] --%% @type json_object() = {struct, [{json_string(), json_term()}]} --%% @type json_eep18_object() = {[{json_string(), json_term()}]} --%% @type json_iolist() = {json, iolist()} --%% @type json_term() = json_string() | json_number() | json_array() | --%% json_object() | json_eep18_object() | json_iolist() -- ---record(encoder, {handler=null, -- utf8=false}). -- ---record(decoder, {object_hook=null, -- offset=0, -- line=1, -- column=1, -- state=null}). -- --%% @spec encoder([encoder_option()]) -> function() --%% @doc Create an encoder/1 with the given options. --%% @type encoder_option() = handler_option() | utf8_option() --%% @type utf8_option() = boolean(). Emit unicode as utf8 (default - false) --encoder(Options) -> -- State = parse_encoder_options(Options, #encoder{}), -- fun (O) -> json_encode(O, State) end. -- --%% @spec encode(json_term()) -> iolist() --%% @doc Encode the given as JSON to an iolist. --encode(Any) -> -- json_encode(Any, #encoder{}). 
-- --%% @spec decoder([decoder_option()]) -> function() --%% @doc Create a decoder/1 with the given options. --decoder(Options) -> -- State = parse_decoder_options(Options, #decoder{}), -- fun (O) -> json_decode(O, State) end. -- --%% @spec decode(iolist(), [{format, proplist | eep18 | struct}]) -> json_term() --%% @doc Decode the given iolist to Erlang terms using the given object format --%% for decoding, where proplist returns JSON objects as [{binary(), json_term()}] --%% proplists, eep18 returns JSON objects as {[binary(), json_term()]}, and struct --%% returns them as-is. --decode(S, Options) -> -- json_decode(S, parse_decoder_options(Options, #decoder{})). -- --%% @spec decode(iolist()) -> json_term() --%% @doc Decode the given iolist to Erlang terms. --decode(S) -> -- json_decode(S, #decoder{}). -- --%% Internal API -- --parse_encoder_options([], State) -> -- State; --parse_encoder_options([{handler, Handler} | Rest], State) -> -- parse_encoder_options(Rest, State#encoder{handler=Handler}); --parse_encoder_options([{utf8, Switch} | Rest], State) -> -- parse_encoder_options(Rest, State#encoder{utf8=Switch}). -- --parse_decoder_options([], State) -> -- State; --parse_decoder_options([{object_hook, Hook} | Rest], State) -> -- parse_decoder_options(Rest, State#decoder{object_hook=Hook}); --parse_decoder_options([{format, Format} | Rest], State) -- when Format =:= struct orelse Format =:= eep18 orelse Format =:= proplist -> -- parse_decoder_options(Rest, State#decoder{object_hook=Format}). 
-- --json_encode(true, _State) -> -- <<"true">>; --json_encode(false, _State) -> -- <<"false">>; --json_encode(null, _State) -> -- <<"null">>; --json_encode(I, _State) when is_integer(I) -> -- integer_to_list(I); --json_encode(F, _State) when is_float(F) -> -- mochinum:digits(F); --json_encode(S, State) when is_binary(S); is_atom(S) -> -- json_encode_string(S, State); --json_encode([{K, _}|_] = Props, State) when (K =/= struct andalso -- K =/= array andalso -- K =/= json) -> -- json_encode_proplist(Props, State); --json_encode({struct, Props}, State) when is_list(Props) -> -- json_encode_proplist(Props, State); --json_encode({Props}, State) when is_list(Props) -> -- json_encode_proplist(Props, State); --json_encode({}, State) -> -- json_encode_proplist([], State); --json_encode(Array, State) when is_list(Array) -> -- json_encode_array(Array, State); --json_encode({array, Array}, State) when is_list(Array) -> -- json_encode_array(Array, State); --json_encode({json, IoList}, _State) -> -- IoList; --json_encode(Bad, #encoder{handler=null}) -> -- exit({json_encode, {bad_term, Bad}}); --json_encode(Bad, State=#encoder{handler=Handler}) -> -- json_encode(Handler(Bad), State). -- --json_encode_array([], _State) -> -- <<"[]">>; --json_encode_array(L, State) -> -- F = fun (O, Acc) -> -- [$,, json_encode(O, State) | Acc] -- end, -- [$, | Acc1] = lists:foldl(F, "[", L), -- lists:reverse([$\] | Acc1]). -- --json_encode_proplist([], _State) -> -- <<"{}">>; --json_encode_proplist(Props, State) -> -- F = fun ({K, V}, Acc) -> -- KS = json_encode_string(K, State), -- VS = json_encode(V, State), -- [$,, VS, $:, KS | Acc] -- end, -- [$, | Acc1] = lists:foldl(F, "{", Props), -- lists:reverse([$\} | Acc1]). 
-- --json_encode_string(A, State) when is_atom(A) -> -- L = atom_to_list(A), -- case json_string_is_safe(L) of -- true -> -- [?Q, L, ?Q]; -- false -> -- json_encode_string_unicode(xmerl_ucs:from_utf8(L), State, [?Q]) -- end; --json_encode_string(B, State) when is_binary(B) -> -- case json_bin_is_safe(B) of -- true -> -- [?Q, B, ?Q]; -- false -> -- json_encode_string_unicode(xmerl_ucs:from_utf8(B), State, [?Q]) -- end; --json_encode_string(I, _State) when is_integer(I) -> -- [?Q, integer_to_list(I), ?Q]; --json_encode_string(L, State) when is_list(L) -> -- case json_string_is_safe(L) of -- true -> -- [?Q, L, ?Q]; -- false -> -- json_encode_string_unicode(L, State, [?Q]) -- end. -- --json_string_is_safe([]) -> -- true; --json_string_is_safe([C | Rest]) -> -- case C of -- ?Q -> -- false; -- $\\ -> -- false; -- $\b -> -- false; -- $\f -> -- false; -- $\n -> -- false; -- $\r -> -- false; -- $\t -> -- false; -- C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF -> -- false; -- C when C < 16#7f -> -- json_string_is_safe(Rest); -- _ -> -- false -- end. -- --json_bin_is_safe(<<>>) -> -- true; --json_bin_is_safe(<>) -> -- case C of -- ?Q -> -- false; -- $\\ -> -- false; -- $\b -> -- false; -- $\f -> -- false; -- $\n -> -- false; -- $\r -> -- false; -- $\t -> -- false; -- C when C >= 0, C < $\s; C >= 16#7f -> -- false; -- C when C < 16#7f -> -- json_bin_is_safe(Rest) -- end. -- --json_encode_string_unicode([], _State, Acc) -> -- lists:reverse([$\" | Acc]); --json_encode_string_unicode([C | Cs], State, Acc) -> -- Acc1 = case C of -- ?Q -> -- [?Q, $\\ | Acc]; -- %% Escaping solidus is only useful when trying to protect -- %% against "" injection attacks which are only -- %% possible when JSON is inserted into a HTML document -- %% in-line. mochijson2 does not protect you from this, so -- %% if you do insert directly into HTML then you need to -- %% uncomment the following case or escape the output of encode. 
-- %% -- %% $/ -> -- %% [$/, $\\ | Acc]; -- %% -- $\\ -> -- [$\\, $\\ | Acc]; -- $\b -> -- [$b, $\\ | Acc]; -- $\f -> -- [$f, $\\ | Acc]; -- $\n -> -- [$n, $\\ | Acc]; -- $\r -> -- [$r, $\\ | Acc]; -- $\t -> -- [$t, $\\ | Acc]; -- C when C >= 0, C < $\s -> -- [unihex(C) | Acc]; -- C when C >= 16#7f, C =< 16#10FFFF, State#encoder.utf8 -> -- [xmerl_ucs:to_utf8(C) | Acc]; -- C when C >= 16#7f, C =< 16#10FFFF, not State#encoder.utf8 -> -- [unihex(C) | Acc]; -- C when C < 16#7f -> -- [C | Acc]; -- _ -> -- exit({json_encode, {bad_char, C}}) -- end, -- json_encode_string_unicode(Cs, State, Acc1). -- --hexdigit(C) when C >= 0, C =< 9 -> -- C + $0; --hexdigit(C) when C =< 15 -> -- C + $a - 10. -- --unihex(C) when C < 16#10000 -> -- <> = <>, -- Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]], -- [$\\, $u | Digits]; --unihex(C) when C =< 16#10FFFF -> -- N = C - 16#10000, -- S1 = 16#d800 bor ((N bsr 10) band 16#3ff), -- S2 = 16#dc00 bor (N band 16#3ff), -- [unihex(S1), unihex(S2)]. -- --json_decode(L, S) when is_list(L) -> -- json_decode(iolist_to_binary(L), S); --json_decode(B, S) -> -- {Res, S1} = decode1(B, S), -- {eof, _} = tokenize(B, S1#decoder{state=trim}), -- Res. -- --decode1(B, S=#decoder{state=null}) -> -- case tokenize(B, S#decoder{state=any}) of -- {{const, C}, S1} -> -- {C, S1}; -- {start_array, S1} -> -- decode_array(B, S1); -- {start_object, S1} -> -- decode_object(B, S1) -- end. -- --make_object(V, #decoder{object_hook=N}) when N =:= null orelse N =:= struct -> -- V; --make_object({struct, P}, #decoder{object_hook=eep18}) -> -- {P}; --make_object({struct, P}, #decoder{object_hook=proplist}) -> -- P; --make_object(V, #decoder{object_hook=Hook}) -> -- Hook(V). -- --decode_object(B, S) -> -- decode_object(B, S#decoder{state=key}, []). 
-- --decode_object(B, S=#decoder{state=key}, Acc) -> -- case tokenize(B, S) of -- {end_object, S1} -> -- V = make_object({struct, lists:reverse(Acc)}, S1), -- {V, S1#decoder{state=null}}; -- {{const, K}, S1} -> -- {colon, S2} = tokenize(B, S1), -- {V, S3} = decode1(B, S2#decoder{state=null}), -- decode_object(B, S3#decoder{state=comma}, [{K, V} | Acc]) -- end; --decode_object(B, S=#decoder{state=comma}, Acc) -> -- case tokenize(B, S) of -- {end_object, S1} -> -- V = make_object({struct, lists:reverse(Acc)}, S1), -- {V, S1#decoder{state=null}}; -- {comma, S1} -> -- decode_object(B, S1#decoder{state=key}, Acc) -- end. -- --decode_array(B, S) -> -- decode_array(B, S#decoder{state=any}, []). -- --decode_array(B, S=#decoder{state=any}, Acc) -> -- case tokenize(B, S) of -- {end_array, S1} -> -- {lists:reverse(Acc), S1#decoder{state=null}}; -- {start_array, S1} -> -- {Array, S2} = decode_array(B, S1), -- decode_array(B, S2#decoder{state=comma}, [Array | Acc]); -- {start_object, S1} -> -- {Array, S2} = decode_object(B, S1), -- decode_array(B, S2#decoder{state=comma}, [Array | Acc]); -- {{const, Const}, S1} -> -- decode_array(B, S1#decoder{state=comma}, [Const | Acc]) -- end; --decode_array(B, S=#decoder{state=comma}, Acc) -> -- case tokenize(B, S) of -- {end_array, S1} -> -- {lists:reverse(Acc), S1#decoder{state=null}}; -- {comma, S1} -> -- decode_array(B, S1#decoder{state=any}, Acc) -- end. -- --tokenize_string(B, S=#decoder{offset=O}) -> -- case tokenize_string_fast(B, O) of -- {escape, O1} -> -- Length = O1 - O, -- S1 = ?ADV_COL(S, Length), -- <<_:O/binary, Head:Length/binary, _/binary>> = B, -- tokenize_string(B, S1, lists:reverse(binary_to_list(Head))); -- O1 -> -- Length = O1 - O, -- <<_:O/binary, String:Length/binary, ?Q, _/binary>> = B, -- {{const, String}, ?ADV_COL(S, Length + 1)} -- end. 
-- --tokenize_string_fast(B, O) -> -- case B of -- <<_:O/binary, ?Q, _/binary>> -> -- O; -- <<_:O/binary, $\\, _/binary>> -> -- {escape, O}; -- <<_:O/binary, C1, _/binary>> when C1 < 128 -> -- tokenize_string_fast(B, 1 + O); -- <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223, -- C2 >= 128, C2 =< 191 -> -- tokenize_string_fast(B, 2 + O); -- <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239, -- C2 >= 128, C2 =< 191, -- C3 >= 128, C3 =< 191 -> -- tokenize_string_fast(B, 3 + O); -- <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244, -- C2 >= 128, C2 =< 191, -- C3 >= 128, C3 =< 191, -- C4 >= 128, C4 =< 191 -> -- tokenize_string_fast(B, 4 + O); -- _ -> -- throw(invalid_utf8) -- end. -- --tokenize_string(B, S=#decoder{offset=O}, Acc) -> -- case B of -- <<_:O/binary, ?Q, _/binary>> -> -- {{const, iolist_to_binary(lists:reverse(Acc))}, ?INC_COL(S)}; -- <<_:O/binary, "\\\"", _/binary>> -> -- tokenize_string(B, ?ADV_COL(S, 2), [$\" | Acc]); -- <<_:O/binary, "\\\\", _/binary>> -> -- tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]); -- <<_:O/binary, "\\/", _/binary>> -> -- tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]); -- <<_:O/binary, "\\b", _/binary>> -> -- tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]); -- <<_:O/binary, "\\f", _/binary>> -> -- tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]); -- <<_:O/binary, "\\n", _/binary>> -> -- tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]); -- <<_:O/binary, "\\r", _/binary>> -> -- tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]); -- <<_:O/binary, "\\t", _/binary>> -> -- tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]); -- <<_:O/binary, "\\u", C3, C2, C1, C0, Rest/binary>> -> -- C = erlang:list_to_integer([C3, C2, C1, C0], 16), -- if C > 16#D7FF, C < 16#DC00 -> -- %% coalesce UTF-16 surrogate pair -- <<"\\u", D3, D2, D1, D0, _/binary>> = Rest, -- D = erlang:list_to_integer([D3,D2,D1,D0], 16), -- [CodePoint] = xmerl_ucs:from_utf16be(<>), -- Acc1 = lists:reverse(xmerl_ucs:to_utf8(CodePoint), Acc), -- 
tokenize_string(B, ?ADV_COL(S, 12), Acc1); -- true -> -- Acc1 = lists:reverse(xmerl_ucs:to_utf8(C), Acc), -- tokenize_string(B, ?ADV_COL(S, 6), Acc1) -- end; -- <<_:O/binary, C1, _/binary>> when C1 < 128 -> -- tokenize_string(B, ?INC_CHAR(S, C1), [C1 | Acc]); -- <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223, -- C2 >= 128, C2 =< 191 -> -- tokenize_string(B, ?ADV_COL(S, 2), [C2, C1 | Acc]); -- <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239, -- C2 >= 128, C2 =< 191, -- C3 >= 128, C3 =< 191 -> -- tokenize_string(B, ?ADV_COL(S, 3), [C3, C2, C1 | Acc]); -- <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244, -- C2 >= 128, C2 =< 191, -- C3 >= 128, C3 =< 191, -- C4 >= 128, C4 =< 191 -> -- tokenize_string(B, ?ADV_COL(S, 4), [C4, C3, C2, C1 | Acc]); -- _ -> -- throw(invalid_utf8) -- end. -- --tokenize_number(B, S) -> -- case tokenize_number(B, sign, S, []) of -- {{int, Int}, S1} -> -- {{const, list_to_integer(Int)}, S1}; -- {{float, Float}, S1} -> -- {{const, list_to_float(Float)}, S1} -- end. -- --tokenize_number(B, sign, S=#decoder{offset=O}, []) -> -- case B of -- <<_:O/binary, $-, _/binary>> -> -- tokenize_number(B, int, ?INC_COL(S), [$-]); -- _ -> -- tokenize_number(B, int, S, []) -- end; --tokenize_number(B, int, S=#decoder{offset=O}, Acc) -> -- case B of -- <<_:O/binary, $0, _/binary>> -> -- tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]); -- <<_:O/binary, C, _/binary>> when C >= $1 andalso C =< $9 -> -- tokenize_number(B, int1, ?INC_COL(S), [C | Acc]) -- end; --tokenize_number(B, int1, S=#decoder{offset=O}, Acc) -> -- case B of -- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> -- tokenize_number(B, int1, ?INC_COL(S), [C | Acc]); -- _ -> -- tokenize_number(B, frac, S, Acc) -- end; --tokenize_number(B, frac, S=#decoder{offset=O}, Acc) -> -- case B of -- <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 -> -- tokenize_number(B, frac1, ?ADV_COL(S, 2), [C, $. 
| Acc]); -- <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E -> -- tokenize_number(B, esign, ?INC_COL(S), [$e, $0, $. | Acc]); -- _ -> -- {{int, lists:reverse(Acc)}, S} -- end; --tokenize_number(B, frac1, S=#decoder{offset=O}, Acc) -> -- case B of -- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> -- tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]); -- <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E -> -- tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]); -- _ -> -- {{float, lists:reverse(Acc)}, S} -- end; --tokenize_number(B, esign, S=#decoder{offset=O}, Acc) -> -- case B of -- <<_:O/binary, C, _/binary>> when C =:= $- orelse C=:= $+ -> -- tokenize_number(B, eint, ?INC_COL(S), [C | Acc]); -- _ -> -- tokenize_number(B, eint, S, Acc) -- end; --tokenize_number(B, eint, S=#decoder{offset=O}, Acc) -> -- case B of -- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> -- tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]) -- end; --tokenize_number(B, eint1, S=#decoder{offset=O}, Acc) -> -- case B of -- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 -> -- tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]); -- _ -> -- {{float, lists:reverse(Acc)}, S} -- end. 
-- --tokenize(B, S=#decoder{offset=O}) -> -- case B of -- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) -> -- tokenize(B, ?INC_CHAR(S, C)); -- <<_:O/binary, "{", _/binary>> -> -- {start_object, ?INC_COL(S)}; -- <<_:O/binary, "}", _/binary>> -> -- {end_object, ?INC_COL(S)}; -- <<_:O/binary, "[", _/binary>> -> -- {start_array, ?INC_COL(S)}; -- <<_:O/binary, "]", _/binary>> -> -- {end_array, ?INC_COL(S)}; -- <<_:O/binary, ",", _/binary>> -> -- {comma, ?INC_COL(S)}; -- <<_:O/binary, ":", _/binary>> -> -- {colon, ?INC_COL(S)}; -- <<_:O/binary, "null", _/binary>> -> -- {{const, null}, ?ADV_COL(S, 4)}; -- <<_:O/binary, "true", _/binary>> -> -- {{const, true}, ?ADV_COL(S, 4)}; -- <<_:O/binary, "false", _/binary>> -> -- {{const, false}, ?ADV_COL(S, 5)}; -- <<_:O/binary, "\"", _/binary>> -> -- tokenize_string(B, ?INC_COL(S)); -- <<_:O/binary, C, _/binary>> when (C >= $0 andalso C =< $9) -- orelse C =:= $- -> -- tokenize_number(B, S); -- <<_:O/binary>> -> -- trim = S#decoder.state, -- {eof, S} -- end. --%% --%% Tests --%% ---ifdef(TEST). ---include_lib("eunit/include/eunit.hrl"). -- -- --%% testing constructs borrowed from the Yaws JSON implementation. -- --%% Create an object from a list of Key/Value pairs. -- --obj_new() -> -- {struct, []}. -- --is_obj({struct, Props}) -> -- F = fun ({K, _}) when is_binary(K) -> true end, -- lists:all(F, Props). -- --obj_from_list(Props) -> -- Obj = {struct, Props}, -- ?assert(is_obj(Obj)), -- Obj. -- --%% Test for equivalence of Erlang terms. --%% Due to arbitrary order of construction, equivalent objects might --%% compare unequal as erlang terms, so we need to carefully recurse --%% through aggregates (tuples and objects). 
-- --equiv({struct, Props1}, {struct, Props2}) -> -- equiv_object(Props1, Props2); --equiv(L1, L2) when is_list(L1), is_list(L2) -> -- equiv_list(L1, L2); --equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2; --equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2; --equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true. -- --%% Object representation and traversal order is unknown. --%% Use the sledgehammer and sort property lists. -- --equiv_object(Props1, Props2) -> -- L1 = lists:keysort(1, Props1), -- L2 = lists:keysort(1, Props2), -- Pairs = lists:zip(L1, L2), -- true = lists:all(fun({{K1, V1}, {K2, V2}}) -> -- equiv(K1, K2) and equiv(V1, V2) -- end, Pairs). -- --%% Recursively compare tuple elements for equivalence. -- --equiv_list([], []) -> -- true; --equiv_list([V1 | L1], [V2 | L2]) -> -- equiv(V1, V2) andalso equiv_list(L1, L2). -- --decode_test() -> -- [1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>), -- <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]). -- --e2j_vec_test() -> -- test_one(e2j_test_vec(utf8), 1). -- --test_one([], _N) -> -- %% io:format("~p tests passed~n", [N-1]), -- ok; --test_one([{E, J} | Rest], N) -> -- %% io:format("[~p] ~p ~p~n", [N, E, J]), -- true = equiv(E, decode(J)), -- true = equiv(E, decode(encode(E))), -- test_one(Rest, 1+N). 
-- --e2j_test_vec(utf8) -> -- [ -- {1, "1"}, -- {3.1416, "3.14160"}, %% text representation may truncate, trail zeroes -- {-1, "-1"}, -- {-3.1416, "-3.14160"}, -- {12.0e10, "1.20000e+11"}, -- {1.234E+10, "1.23400e+10"}, -- {-1.234E-10, "-1.23400e-10"}, -- {10.0, "1.0e+01"}, -- {123.456, "1.23456E+2"}, -- {10.0, "1e1"}, -- {<<"foo">>, "\"foo\""}, -- {<<"foo", 5, "bar">>, "\"foo\\u0005bar\""}, -- {<<"">>, "\"\""}, -- {<<"\n\n\n">>, "\"\\n\\n\\n\""}, -- {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\""}, -- {obj_new(), "{}"}, -- {obj_from_list([{<<"foo">>, <<"bar">>}]), "{\"foo\":\"bar\"}"}, -- {obj_from_list([{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]), -- "{\"foo\":\"bar\",\"baz\":123}"}, -- {[], "[]"}, -- {[[]], "[[]]"}, -- {[1, <<"foo">>], "[1,\"foo\"]"}, -- -- %% json array in a json object -- {obj_from_list([{<<"foo">>, [123]}]), -- "{\"foo\":[123]}"}, -- -- %% json object in a json object -- {obj_from_list([{<<"foo">>, obj_from_list([{<<"bar">>, true}])}]), -- "{\"foo\":{\"bar\":true}}"}, -- -- %% fold evaluation order -- {obj_from_list([{<<"foo">>, []}, -- {<<"bar">>, obj_from_list([{<<"baz">>, true}])}, -- {<<"alice">>, <<"bob">>}]), -- "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"}, -- -- %% json object in a json array -- {[-123, <<"foo">>, obj_from_list([{<<"bar">>, []}]), null], -- "[-123,\"foo\",{\"bar\":[]},null]"} -- ]. -- --%% test utf8 encoding --encoder_utf8_test() -> -- %% safe conversion case (default) -- [34,"\\u0001","\\u0442","\\u0435","\\u0441","\\u0442",34] = -- encode(<<1,"\321\202\320\265\321\201\321\202">>), -- -- %% raw utf8 output (optional) -- Enc = mochijson2:encoder([{utf8, true}]), -- [34,"\\u0001",[209,130],[208,181],[209,129],[209,130],34] = -- Enc(<<1,"\321\202\320\265\321\201\321\202">>). 
-- --input_validation_test() -> -- Good = [ -- {16#00A3, <>}, %% pound -- {16#20AC, <>}, %% euro -- {16#10196, <>} %% denarius -- ], -- lists:foreach(fun({CodePoint, UTF8}) -> -- Expect = list_to_binary(xmerl_ucs:to_utf8(CodePoint)), -- Expect = decode(UTF8) -- end, Good), -- -- Bad = [ -- %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte -- <>, -- %% missing continuations, last byte in each should be 80-BF -- <>, -- <>, -- <>, -- %% we don't support code points > 10FFFF per RFC 3629 -- <>, -- %% escape characters trigger a different code path -- <> -- ], -- lists:foreach( -- fun(X) -> -- ok = try decode(X) catch invalid_utf8 -> ok end, -- %% could be {ucs,{bad_utf8_character_code}} or -- %% {json_encode,{bad_char,_}} -- {'EXIT', _} = (catch encode(X)) -- end, Bad). -- --inline_json_test() -> -- ?assertEqual(<<"\"iodata iodata\"">>, -- iolist_to_binary( -- encode({json, [<<"\"iodata">>, " iodata\""]}))), -- ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]}, -- decode( -- encode({struct, -- [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))), -- ok. -- --big_unicode_test() -> -- UTF8Seq = list_to_binary(xmerl_ucs:to_utf8(16#0001d120)), -- ?assertEqual( -- <<"\"\\ud834\\udd20\"">>, -- iolist_to_binary(encode(UTF8Seq))), -- ?assertEqual( -- UTF8Seq, -- decode(iolist_to_binary(encode(UTF8Seq)))), -- ok. -- --custom_decoder_test() -> -- ?assertEqual( -- {struct, [{<<"key">>, <<"value">>}]}, -- (decoder([]))("{\"key\": \"value\"}")), -- F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end, -- ?assertEqual( -- win, -- (decoder([{object_hook, F}]))("{\"key\": \"value\"}")), -- ok. 
-- --atom_test() -> -- %% JSON native atoms -- [begin -- ?assertEqual(A, decode(atom_to_list(A))), -- ?assertEqual(iolist_to_binary(atom_to_list(A)), -- iolist_to_binary(encode(A))) -- end || A <- [true, false, null]], -- %% Atom to string -- ?assertEqual( -- <<"\"foo\"">>, -- iolist_to_binary(encode(foo))), -- ?assertEqual( -- <<"\"\\ud834\\udd20\"">>, -- iolist_to_binary(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))), -- ok. -- --key_encode_test() -> -- %% Some forms are accepted as keys that would not be strings in other -- %% cases -- ?assertEqual( -- <<"{\"foo\":1}">>, -- iolist_to_binary(encode({struct, [{foo, 1}]}))), -- ?assertEqual( -- <<"{\"foo\":1}">>, -- iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))), -- ?assertEqual( -- <<"{\"foo\":1}">>, -- iolist_to_binary(encode({struct, [{"foo", 1}]}))), -- ?assertEqual( -- <<"{\"foo\":1}">>, -- iolist_to_binary(encode([{foo, 1}]))), -- ?assertEqual( -- <<"{\"foo\":1}">>, -- iolist_to_binary(encode([{<<"foo">>, 1}]))), -- ?assertEqual( -- <<"{\"foo\":1}">>, -- iolist_to_binary(encode([{"foo", 1}]))), -- ?assertEqual( -- <<"{\"\\ud834\\udd20\":1}">>, -- iolist_to_binary( -- encode({struct, [{[16#0001d120], 1}]}))), -- ?assertEqual( -- <<"{\"1\":1}">>, -- iolist_to_binary(encode({struct, [{1, 1}]}))), -- ok. 
-- --unsafe_chars_test() -> -- Chars = "\"\\\b\f\n\r\t", -- [begin -- ?assertEqual(false, json_string_is_safe([C])), -- ?assertEqual(false, json_bin_is_safe(<>)), -- ?assertEqual(<>, decode(encode(<>))) -- end || C <- Chars], -- ?assertEqual( -- false, -- json_string_is_safe([16#0001d120])), -- ?assertEqual( -- false, -- json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8(16#0001d120)))), -- ?assertEqual( -- [16#0001d120], -- xmerl_ucs:from_utf8( -- binary_to_list( -- decode(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))))), -- ?assertEqual( -- false, -- json_string_is_safe([16#110000])), -- ?assertEqual( -- false, -- json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8([16#110000])))), -- %% solidus can be escaped but isn't unsafe by default -- ?assertEqual( -- <<"/">>, -- decode(<<"\"\\/\"">>)), -- ok. -- --int_test() -> -- ?assertEqual(0, decode("0")), -- ?assertEqual(1, decode("1")), -- ?assertEqual(11, decode("11")), -- ok. -- --large_int_test() -> -- ?assertEqual(<<"-2147483649214748364921474836492147483649">>, -- iolist_to_binary(encode(-2147483649214748364921474836492147483649))), -- ?assertEqual(<<"2147483649214748364921474836492147483649">>, -- iolist_to_binary(encode(2147483649214748364921474836492147483649))), -- ok. -- --float_test() -> -- ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649.0))), -- ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648.0))), -- ok. -- --handler_test() -> -- ?assertEqual( -- {'EXIT',{json_encode,{bad_term,{x,y}}}}, -- catch encode({x,y})), -- F = fun ({x,y}) -> [] end, -- ?assertEqual( -- <<"[]">>, -- iolist_to_binary((encoder([{handler, F}]))({x, y}))), -- ok. -- --encode_empty_test_() -> -- [{A, ?_assertEqual(<<"{}">>, iolist_to_binary(encode(B)))} -- || {A, B} <- [{"eep18 {}", {}}, -- {"eep18 {[]}", {[]}}, -- {"{struct, []}", {struct, []}}]]. 
-- --encode_test_() -> -- P = [{<<"k">>, <<"v">>}], -- JSON = iolist_to_binary(encode({struct, P})), -- [{atom_to_list(F), -- ?_assertEqual(JSON, iolist_to_binary(encode(decode(JSON, [{format, F}]))))} -- || F <- [struct, eep18, proplist]]. -- --format_test_() -> -- P = [{<<"k">>, <<"v">>}], -- JSON = iolist_to_binary(encode({struct, P})), -- [{atom_to_list(F), -- ?_assertEqual(A, decode(JSON, [{format, F}]))} -- || {F, A} <- [{struct, {struct, P}}, -- {eep18, {P}}, -- {proplist, P}]]. -- ---endif. -diff --git a/src/mochinum.erl b/src/mochinum.erl -deleted file mode 100644 -index c52b15c..0000000 ---- a/src/mochinum.erl -+++ /dev/null -@@ -1,354 +0,0 @@ --%% @copyright 2007 Mochi Media, Inc. --%% @author Bob Ippolito -- --%% @doc Useful numeric algorithms for floats that cover some deficiencies --%% in the math module. More interesting is digits/1, which implements --%% the algorithm from: --%% http://www.cs.indiana.edu/~burger/fp/index.html --%% See also "Printing Floating-Point Numbers Quickly and Accurately" --%% in Proceedings of the SIGPLAN '96 Conference on Programming Language --%% Design and Implementation. -- ---module(mochinum). ---author("Bob Ippolito "). ---export([digits/1, frexp/1, int_pow/2, int_ceil/1]). -- --%% IEEE 754 Float exponent bias ---define(FLOAT_BIAS, 1022). ---define(MIN_EXP, -1074). ---define(BIG_POW, 4503599627370496). -- --%% External API -- --%% @spec digits(number()) -> string() --%% @doc Returns a string that accurately represents the given integer or float --%% using a conservative amount of digits. Great for generating --%% human-readable output, or compact ASCII serializations for floats. 
--digits(N) when is_integer(N) -> -- integer_to_list(N); --digits(0.0) -> -- "0.0"; --digits(Float) -> -- {Frac1, Exp1} = frexp_int(Float), -- [Place0 | Digits0] = digits1(Float, Exp1, Frac1), -- {Place, Digits} = transform_digits(Place0, Digits0), -- R = insert_decimal(Place, Digits), -- case Float < 0 of -- true -> -- [$- | R]; -- _ -> -- R -- end. -- --%% @spec frexp(F::float()) -> {Frac::float(), Exp::float()} --%% @doc Return the fractional and exponent part of an IEEE 754 double, --%% equivalent to the libc function of the same name. --%% F = Frac * pow(2, Exp). --frexp(F) -> -- frexp1(unpack(F)). -- --%% @spec int_pow(X::integer(), N::integer()) -> Y::integer() --%% @doc Moderately efficient way to exponentiate integers. --%% int_pow(10, 2) = 100. --int_pow(_X, 0) -> -- 1; --int_pow(X, N) when N > 0 -> -- int_pow(X, N, 1). -- --%% @spec int_ceil(F::float()) -> integer() --%% @doc Return the ceiling of F as an integer. The ceiling is defined as --%% F when F == trunc(F); --%% trunc(F) when F < 0; --%% trunc(F) + 1 when F > 0. --int_ceil(X) -> -- T = trunc(X), -- case (X - T) of -- Pos when Pos > 0 -> T + 1; -- _ -> T -- end. -- -- --%% Internal API -- --int_pow(X, N, R) when N < 2 -> -- R * X; --int_pow(X, N, R) -> -- int_pow(X * X, N bsr 1, case N band 1 of 1 -> R * X; 0 -> R end). -- --insert_decimal(0, S) -> -- "0." ++ S; --insert_decimal(Place, S) when Place > 0 -> -- L = length(S), -- case Place - L of -- 0 -> -- S ++ ".0"; -- N when N < 0 -> -- {S0, S1} = lists:split(L + N, S), -- S0 ++ "." ++ S1; -- N when N < 6 -> -- %% More places than digits -- S ++ lists:duplicate(N, $0) ++ ".0"; -- _ -> -- insert_decimal_exp(Place, S) -- end; --insert_decimal(Place, S) when Place > -6 -> -- "0." ++ lists:duplicate(abs(Place), $0) ++ S; --insert_decimal(Place, S) -> -- insert_decimal_exp(Place, S). 
-- --insert_decimal_exp(Place, S) -> -- [C | S0] = S, -- S1 = case S0 of -- [] -> -- "0"; -- _ -> -- S0 -- end, -- Exp = case Place < 0 of -- true -> -- "e-"; -- false -> -- "e+" -- end, -- [C] ++ "." ++ S1 ++ Exp ++ integer_to_list(abs(Place - 1)). -- -- --digits1(Float, Exp, Frac) -> -- Round = ((Frac band 1) =:= 0), -- case Exp >= 0 of -- true -> -- BExp = 1 bsl Exp, -- case (Frac =/= ?BIG_POW) of -- true -> -- scale((Frac * BExp * 2), 2, BExp, BExp, -- Round, Round, Float); -- false -> -- scale((Frac * BExp * 4), 4, (BExp * 2), BExp, -- Round, Round, Float) -- end; -- false -> -- case (Exp =:= ?MIN_EXP) orelse (Frac =/= ?BIG_POW) of -- true -> -- scale((Frac * 2), 1 bsl (1 - Exp), 1, 1, -- Round, Round, Float); -- false -> -- scale((Frac * 4), 1 bsl (2 - Exp), 2, 1, -- Round, Round, Float) -- end -- end. -- --scale(R, S, MPlus, MMinus, LowOk, HighOk, Float) -> -- Est = int_ceil(math:log10(abs(Float)) - 1.0e-10), -- %% Note that the scheme implementation uses a 326 element look-up table -- %% for int_pow(10, N) where we do not. -- case Est >= 0 of -- true -> -- fixup(R, S * int_pow(10, Est), MPlus, MMinus, Est, -- LowOk, HighOk); -- false -> -- Scale = int_pow(10, -Est), -- fixup(R * Scale, S, MPlus * Scale, MMinus * Scale, Est, -- LowOk, HighOk) -- end. -- --fixup(R, S, MPlus, MMinus, K, LowOk, HighOk) -> -- TooLow = case HighOk of -- true -> -- (R + MPlus) >= S; -- false -> -- (R + MPlus) > S -- end, -- case TooLow of -- true -> -- [(K + 1) | generate(R, S, MPlus, MMinus, LowOk, HighOk)]; -- false -> -- [K | generate(R * 10, S, MPlus * 10, MMinus * 10, LowOk, HighOk)] -- end. 
-- --generate(R0, S, MPlus, MMinus, LowOk, HighOk) -> -- D = R0 div S, -- R = R0 rem S, -- TC1 = case LowOk of -- true -> -- R =< MMinus; -- false -> -- R < MMinus -- end, -- TC2 = case HighOk of -- true -> -- (R + MPlus) >= S; -- false -> -- (R + MPlus) > S -- end, -- case TC1 of -- false -> -- case TC2 of -- false -> -- [D | generate(R * 10, S, MPlus * 10, MMinus * 10, -- LowOk, HighOk)]; -- true -> -- [D + 1] -- end; -- true -> -- case TC2 of -- false -> -- [D]; -- true -> -- case R * 2 < S of -- true -> -- [D]; -- false -> -- [D + 1] -- end -- end -- end. -- --unpack(Float) -> -- <> = <>, -- {Sign, Exp, Frac}. -- --frexp1({_Sign, 0, 0}) -> -- {0.0, 0}; --frexp1({Sign, 0, Frac}) -> -- Exp = log2floor(Frac), -- <> = <>, -- {Frac1, -(?FLOAT_BIAS) - 52 + Exp}; --frexp1({Sign, Exp, Frac}) -> -- <> = <>, -- {Frac1, Exp - ?FLOAT_BIAS}. -- --log2floor(Int) -> -- log2floor(Int, 0). -- --log2floor(0, N) -> -- N; --log2floor(Int, N) -> -- log2floor(Int bsr 1, 1 + N). -- -- --transform_digits(Place, [0 | Rest]) -> -- transform_digits(Place, Rest); --transform_digits(Place, Digits) -> -- {Place, [$0 + D || D <- Digits]}. -- -- --frexp_int(F) -> -- case unpack(F) of -- {_Sign, 0, Frac} -> -- {Frac, ?MIN_EXP}; -- {_Sign, Exp, Frac} -> -- {Frac + (1 bsl 52), Exp - 53 - ?FLOAT_BIAS} -- end. -- --%% --%% Tests --%% ---ifdef(TEST). ---include_lib("eunit/include/eunit.hrl"). -- --int_ceil_test() -> -- ?assertEqual(1, int_ceil(0.0001)), -- ?assertEqual(0, int_ceil(0.0)), -- ?assertEqual(1, int_ceil(0.99)), -- ?assertEqual(1, int_ceil(1.0)), -- ?assertEqual(-1, int_ceil(-1.5)), -- ?assertEqual(-2, int_ceil(-2.0)), -- ok. -- --int_pow_test() -> -- ?assertEqual(1, int_pow(1, 1)), -- ?assertEqual(1, int_pow(1, 0)), -- ?assertEqual(1, int_pow(10, 0)), -- ?assertEqual(10, int_pow(10, 1)), -- ?assertEqual(100, int_pow(10, 2)), -- ?assertEqual(1000, int_pow(10, 3)), -- ok. 
-- --digits_test() -> -- ?assertEqual("0", -- digits(0)), -- ?assertEqual("0.0", -- digits(0.0)), -- ?assertEqual("1.0", -- digits(1.0)), -- ?assertEqual("-1.0", -- digits(-1.0)), -- ?assertEqual("0.1", -- digits(0.1)), -- ?assertEqual("0.01", -- digits(0.01)), -- ?assertEqual("0.001", -- digits(0.001)), -- ?assertEqual("1.0e+6", -- digits(1000000.0)), -- ?assertEqual("0.5", -- digits(0.5)), -- ?assertEqual("4503599627370496.0", -- digits(4503599627370496.0)), -- %% small denormalized number -- %% 4.94065645841246544177e-324 =:= 5.0e-324 -- <> = <<0,0,0,0,0,0,0,1>>, -- ?assertEqual("5.0e-324", -- digits(SmallDenorm)), -- ?assertEqual(SmallDenorm, -- list_to_float(digits(SmallDenorm))), -- %% large denormalized number -- %% 2.22507385850720088902e-308 -- <> = <<0,15,255,255,255,255,255,255>>, -- ?assertEqual("2.225073858507201e-308", -- digits(BigDenorm)), -- ?assertEqual(BigDenorm, -- list_to_float(digits(BigDenorm))), -- %% small normalized number -- %% 2.22507385850720138309e-308 -- <> = <<0,16,0,0,0,0,0,0>>, -- ?assertEqual("2.2250738585072014e-308", -- digits(SmallNorm)), -- ?assertEqual(SmallNorm, -- list_to_float(digits(SmallNorm))), -- %% large normalized number -- %% 1.79769313486231570815e+308 -- <> = <<127,239,255,255,255,255,255,255>>, -- ?assertEqual("1.7976931348623157e+308", -- digits(LargeNorm)), -- ?assertEqual(LargeNorm, -- list_to_float(digits(LargeNorm))), -- %% issue #10 - mochinum:frexp(math:pow(2, -1074)). -- ?assertEqual("5.0e-324", -- digits(math:pow(2, -1074))), -- ok. 
-- --frexp_test() -> -- %% zero -- ?assertEqual({0.0, 0}, frexp(0.0)), -- %% one -- ?assertEqual({0.5, 1}, frexp(1.0)), -- %% negative one -- ?assertEqual({-0.5, 1}, frexp(-1.0)), -- %% small denormalized number -- %% 4.94065645841246544177e-324 -- <> = <<0,0,0,0,0,0,0,1>>, -- ?assertEqual({0.5, -1073}, frexp(SmallDenorm)), -- %% large denormalized number -- %% 2.22507385850720088902e-308 -- <> = <<0,15,255,255,255,255,255,255>>, -- ?assertEqual( -- {0.99999999999999978, -1022}, -- frexp(BigDenorm)), -- %% small normalized number -- %% 2.22507385850720138309e-308 -- <> = <<0,16,0,0,0,0,0,0>>, -- ?assertEqual({0.5, -1021}, frexp(SmallNorm)), -- %% large normalized number -- %% 1.79769313486231570815e+308 -- <> = <<127,239,255,255,255,255,255,255>>, -- ?assertEqual( -- {0.99999999999999989, 1024}, -- frexp(LargeNorm)), -- %% issue #10 - mochinum:frexp(math:pow(2, -1074)). -- ?assertEqual( -- {0.5, -1073}, -- frexp(math:pow(2, -1074))), -- ok. -- ---endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/LICENSE-MIT-Mochi b/rabbitmq-server/plugins-src/mochiweb-wrapper/LICENSE-MIT-Mochi deleted file mode 100644 index c85b65a..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/LICENSE-MIT-Mochi +++ /dev/null @@ -1,9 +0,0 @@ -This is the MIT license. - -Copyright (c) 2007 Mochi Media, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/Makefile b/rabbitmq-server/plugins-src/mochiweb-wrapper/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/hash.mk b/rabbitmq-server/plugins-src/mochiweb-wrapper/hash.mk deleted file mode 100644 index d1cebfa..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/hash.mk +++ /dev/null @@ -1 +0,0 @@ -UPSTREAM_SHORT_HASH:=680dba8 diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/license_info b/rabbitmq-server/plugins-src/mochiweb-wrapper/license_info deleted file mode 100644 index c72a6af..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/license_info +++ /dev/null @@ -1,4 +0,0 @@ -Mochiweb is "Copyright (c) 2007 Mochi Media, Inc." and is covered by -the MIT license. 
It was downloaded from -http://github.com/mochi/mochiweb/ - diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.done b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.travis.yml b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.travis.yml deleted file mode 100644 index 43dad1a..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: erlang -notifications: - email: false -otp_release: - - R15B02 - - R15B03 - - R16B diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/CHANGES.md b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/CHANGES.md deleted file mode 100644 index 06a8b5f..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/CHANGES.md +++ /dev/null @@ -1,91 +0,0 @@ -Version 2.7.0 released XXXX-XX-XX - -* `mochiweb_socket_server:stop/1` is now a synchronous - call instead of an asynchronous cast -* `mochiweb_html:parse_tokens/1` (and `parse/1`) will now create a - html element to wrap documents that have a HTML5 doctype - (``) but no html element - https://github.com/mochi/mochiweb/issues/110 - -Version 2.6.0 released 2013-04-15 - -* Enable R15B gen_tcp workaround only on R15B - https://github.com/mochi/mochiweb/pull/107 - -Version 2.5.0 released 2013-03-04 - -* Replace now() with os:timestamp() in acceptor (optimization) - https://github.com/mochi/mochiweb/pull/102 -* New mochiweb_session module for managing session cookies. - NOTE: this module is only supported on R15B02 and later! - https://github.com/mochi/mochiweb/pull/94 -* New mochiweb_base64url module for base64url encoding - (URL and Filename safe alphabet, see RFC 4648). 
-* Fix rebar.config in mochiwebapp_skel to use {branch, "master"} - https://github.com/mochi/mochiweb/issues/105 - -Version 2.4.2 released 2013-02-05 - -* Fixed issue in mochiweb_response introduced in v2.4.0 - https://github.com/mochi/mochiweb/pull/100 - -Version 2.4.1 released 2013-01-30 - -* Fixed issue in mochiweb_request introduced in v2.4.0 - https://github.com/mochi/mochiweb/issues/97 -* Fixed issue in mochifmt_records introduced in v2.4.0 - https://github.com/mochi/mochiweb/issues/96 - -Version 2.4.0 released 2013-01-23 - -* Switch from parameterized modules to explicit tuple module calls for - R16 compatibility (#95) -* Fix for mochiweb_acceptor crash with extra-long HTTP headers under - R15B02 (#91) -* Fix case in handling range headers (#85) -* Handle combined Content-Length header (#88) -* Windows security fix for `safe_relative_path`, any path with a - backslash on any platform is now considered unsafe (#92) - -Version 2.3.2 released 2012-07-27 - -* Case insensitive match for "Connection: close" (#81) - -Version 2.3.1 released 2012-03-31 - -* Fix edoc warnings (#63) -* Fix mochiweb_html handling of invalid charref sequences (unescaped &) (#69). -* Add a manual garbage collection between requests to avoid worst case behavior - on keep-alive sockets. -* Fix dst cookie bug (#73) -* Removed unnecessary template_dir option, see - https://github.com/basho/rebar/issues/203 - -Version 2.3.0 released 2011-10-14 - -* Handle ssl_closed message in mochiweb_http (#59) -* Added support for new MIME types (otf, eot, m4v, svg, svgz, ttc, ttf, - vcf, webm, webp, woff) (#61) -* Updated mochiweb_charref to support all HTML5 entities. Note that - if you are using this module directly, the spec has changed to return - `[integer()]` for some entities. 
(#64) - -Version 2.2.1 released 2011-08-31 - -* Removed `mochiweb_skel` module from the pre-rebar era - -Version 2.2.0 released 2011-08-29 - -* Added new `mochiweb_http:start_link/1` and - `mochiweb_socket_server:start_link/1` APIs to explicitly start linked - servers. Also added `{link, false}` option to the `start/1` variants - to explicitly start unlinked. This is in expectation that we will - eventually change the default behavior of `start/1` to be unlinked as you - would expect it to. See https://github.com/mochi/mochiweb/issues/58 for - discussion. - -Version 2.1.0 released 2011-08-29 - -* Added new `mochijson2:decode/2` with `{format, struct | proplist | eep18}` - options for easy decoding to various proplist formats. Also added encoding - support for eep18 style objects. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/Makefile b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/Makefile deleted file mode 100644 index 9de1944..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/Makefile +++ /dev/null @@ -1,29 +0,0 @@ - -PREFIX:=../ -DEST:=$(PREFIX)$(PROJECT) - -REBAR=./rebar - -all: - @$(REBAR) get-deps compile - -edoc: - @$(REBAR) doc - -test: - @rm -rf .eunit - @mkdir -p .eunit - @$(REBAR) skip_deps=true eunit - -clean: - @$(REBAR) clean - -build_plt: - @$(REBAR) build-plt - -dialyzer: - @$(REBAR) dialyze - -app: - @$(REBAR) create template=mochiwebapp dest=$(DEST) appid=$(PROJECT) - diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/rebar deleted file mode 100755 index 8082f142fd527c7fa291eecad48d465f8d9bff93..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 95259 zcmZ6yLzFN))TLXtZQHhO+qP}nw(WY$wr$(CeZTIj_4h0*86<;buk-AkLrh5T>g+^s zWNAllYUf5^>TK*}>EHrIN=isTYiI9lVQORZzXq0e4z4ax3evzJC;$Ke5CF7}vV3ni zSXkI_001WF007Yc&6=CK*gLq;8JQZ|hHCiQV4r#R9Zkg(cVT1>eipIsLdb1#oc 
zme43RLVCo_eRa>B-FfoMxnF*tiGdW} zm1ZtKJ1?2}iX!Oz(LxAKb{S;6gk4yoWYZ2=vs90423a>;+h)#4*$!B*rpIbc8Pl;{ zrSpn4Y1(^LrKW8>vc^h-(dfLG&XpRq*hVPM`7~@Y38fYD=%`(`__XM0-_v4OKc#AI z?A4{hGKh7QD+SAzUR&Jk87kSdP(dq9qGv<*aHG;HRx8$KaJRUvx1n9hF4?kejppG# zW;u==Z}xV0^;n@;m#DFOt5e-8Yo9;X->W>a!nB$m?(Z5E8w$h$Mcq_gOt*Yf*p6sq zGG}>9yNG@G_0kdKac7J1tqA#GMG=+mOU?NK5MRM{bQCaU}HeL=;R#ky3l z4qeuSYad_sRo7l`9t%;7IQZE%>)=m=Mm;u=!ZYr5;6{T#+Wa@LInE zk?^bKYj?faKc~8X2H!XC!B)YRc4|;vs$8eZs$wi?Y6v-&2e6v2m7O{X@|9AXxn$)k zNxyDd7pbU@$*D=#J4tPVrVn^$Yn=WXKJ*$E45WibP|Dj50}?r@W}0l^Z(fJ?6UW3w zvj~EjgpFvvB|aG*C9-^rOf=;oPBO&vM(Vsw&fzGUs>&ouriK8h#+sosWSTi1OzcpH zKj*}c3TmM$aLKnA(Ju186uWX@rnsWEswSz$?E`=HEj%2;!}2;kf4c~zoa+`L+c7Dc zk^gKmNEIb>ESqE|I!r$`g2Kj0BGMIvAZgJg8y(U{j41+UtZHOgiM)Ag*qwBc}*PMScmMw){x z0TdBDWCM3yGSFRVYbBfjn+#J(ZyaC1Qdku?v30bKr7|r!si|Yv)`SWT(_;Ub_Lk>Ts{|2@Xm&IQsCwkR58^KCn8hKam7*u3i*gKPxnOdB`x=;>`jjkwgz?U|R!K7Y;iO z2ipc7vPe_Q5u5VGOH#~g-ArrVgh+uTYG@iV_QKMMT_7Tz9~mcyOcFW^+kNlN7iSUc z%neidZYq7iR`NuucqGR~Df#E%QF$`6i(H($sc&<4I>or3Pztox6G$yUaz!AQE=$dq zHzpUzI5^vqQS0PST)JaUG<5CEk8oD?w+=65#V!IOt;f?aW4kIc`;g^sfq^3r^`9`RraVJp5if~Elb%yYzM=%W(Lqrsr6AFwVYqE|jG{iizfYnP5&biq*+4JQVB(K9H=gt&Z zLwW&0<}Ry{gtX>?R4BVhrN{-W0dc^VndqLF0uS+da8aT&EOQh@!B)7*f#uB+3=*nL zB0Xb@c!ZgFx%jPXb=fisl_#e8Hr(GH2n-rYaBK7DoLZ_8^>6{n(2Ihd!*q<+3-jeA zvV-DLOsluKQY7Ap0o{-Q;fq-|L1qc-A? 
z&U^KV_sWxBs!w2%;^ZkVntq)Q&*8Ui-v~rQS7tsPz-#~vdFrBwqNl_-rV7dz)Zk6j z;GM}~DtZxJF;Ts2s_40KhBiWJ_w-bxazBJ}@P(o=`)Z(T0?mhuwOzhAp0|WFKozYN z$>144tnT}upM(O(Lx7+l7+7d&1O?y#}AA(r^O_Zu)=OAf?rE5?fC-^pM4!L@>jDon{DKas~jTTg5ia!G24|-(UV>Q-nTYc1GI@o!-^nAyfL~5rCU+uf;X=A23__?ab z$(cEtsHEWsAeEGK&p(cV8I|(JIAl6>AC3d zfoor*ZFdiv(fV$;-SFtsJ9J-%#m1x-HZzYnmb*XxPqV_NSCur;6>yhrCax4eS$B!X ze}YTvBI_sKXcfUh;-dWl;%ORML@a~#?+i_aQ+K#}OzW9bd~s8EVwPOb^gg*RIUz~x zVwbii0megl5uI8=^b_j6Of2CQ>%5j(|MWyJ%IzbfceuN%c{60*Fmgt|S zeuQ>|Q}xf$-@PYsU&($2lV2y_PE<%}LsL-{b51K?g(WEsjdU69$Cd2GuVhEa0+3tL zGM?rc4xL1sZ0(Lb`seJ611y(a!pS$y$)f~*T$D-nGm#FpvfWzjg*YSW{DNN&*%DtP z7j@|ZKeTZOAXFjQZ?_Q2=8xTxo^5dk?UB#$QjNcJG4H?`f33)iJxyCx*%RApK z)a`(Eohqa8-M;a`W8Fux98Nj&&lmkWqy5XO#a301WYTA^>mTZtQR507RtKmrru$Cr zzwfXM7{Rw%hn&CChMqX;Ir`9Yt=8J-#&Gm0#ed_D17>&j$E!zDBzI-)m<|x!-(%k9MG&U<_NepleJ1O1%X7J z{C&P_Kj(yw#6z22l-@Qk>kKz5ui(y0f=6iQQ;MB&T?Q9Az_MJ9tKsHO(ghvE^}$AP z`~X=l@0|+}$6kpf+>hyl!f^PY7jbe~&i@gg_3$2#e`qP-a_ZBA<9=i;5vLz{=Qn}9 zbL~*Cdk1`N9W8szABc|0Azrskxw>(-~EX4{>;?-|BWN5-=l4v8ueISq=1@M>ekS%Xjb+bsZ@`wD)e+1RNgSv zv0Tb7eY%ai`sz?<>c0_CLiwDZ314m1@TPI;SEg3oGzJd0@uAQoO{Y!?8JrGYG&Ox}Uz{mT4c;pHnuy@zMxMMd@*-kiQApC0 zG<__c$>r*!xzVb~xcc~r(9d3=>GAz?B1+Oi=e^Ir&r_s#%uGf#oR3gF`Kq{}S}rI* zqX(-6)p~VlGNpx%-xDR^>#U-1N3c^at_KzAS}RLo;bBh#*w(6?dBN_2!&V0P%tGUDmE_?$&@NXi*Vs`i=tGz z=2&7TRK|>43aBD55*Mjfzy7?wCRH2QhPl7>yxn>G=l%4WZmHMQlv4O*_SKqtMyFwv zQG6;#6`?T>U8JDJ!BDhH7;#`?D`AM@LX{F7w6m60HhA<=GFp%qnP|vTdVaE&)W}y+ z?kIF{^Bl)eUkUXzc%alMn-w($u+(M{M^+d~maX`W@TEYA3;QDMa^*yng`+SZ+*BD! 
z$W|FCryeVC0D&dMm#r^7R4k%LQ!7?i0aRxdH$;RPP-;_vFbWY$OG#HmnNp-JDz)|& z{wt$==q{-Wi4m$v(8qYJuW6>J%2L1-t|MZ_ZG#DK84qAhy$z+hkkaNUgnoq0R*;Ic zXb{=P3{wkj7Kx}ioTh4V@Q(_Vwm^tt8$)&^r57Kunq*KZqMT>4?QgUrK*JRHi2@@X zE0#lw03iv~^}T5Cg|V~{T1lzUR9=cIrRPaenl?z8j2~ew)mLny)G$8YwZ4(ANT^p0 zELUT5q;7<`!9iDy;Dj1$YZOfZ-fas=lWm(4 zfEYhxK`p)ka`DL#P8gScXgL81P1KZWd}fM66EwKs!C2HqKpuGO2nBRHgW+j|Gm59t zHI*(VdojJ4YWa)oi#R*_ay-aO9|vV91D*B7!aFq^rYy8vGv!;JBdl^QUua zSKre8qgyma6wqRy^#)w$JSb%;Wzg6!NHjsMN1fCECgoqlMqM;D?8jd)~ z$t@D!uQP&fl;9$OUTRG;ZsSFn=^&VLWpv~w?E_USO?x{a2q$+X$a0(;^HxAw6lvCk z%LHOP6KFhn+xpPlSpp6E!H0h1#nqd59^X6YDsiVxw0mG=M3~~G7u6zxGH1sGOYV~4 z5Lbe+7t^!>GY&G2qpb5r9LPzr90m3=gC=-Fn(%Fm$sJ?|{L3#F;in%@y+&C$$ZW3edeGL6J(@%7q49URlMy^r{#*%<UxKQ)y-6D(K zu@0>Bp8oDZ3q0^(5Y4OAK@u-i8%EEIii3+(I&{8VNq@d0b>n?sY~XTWL7pbX)^rp&AMf+gM|DL zk$#ENDNM*E=p+!YVy7a?=Q+@@ z2=Ll=vo960I|I%#``AGCcaT7vL+H+sG(0GYUKRD9MwfNz?cssns7Hsjt=>mHZ zAav>KWt2%EY|nIc0%n|GxE`r7Nvl^@tA^VrP%b)dB-{RS9Cs#Gdv|*)JAG~mo0hXe zKQIXK1&?OV!v~ELFsFtGwUXJ}kRb%5#lRb)!4v|85qz#|GK6S$Ctw1B3saz$`+U)E zn_zKMA-<>WS`WaG3vD#6Q^qdHV0z2}d!(~BIx9ja0jt_%ZqqcGhOX-k&bHbcoXt<$ z&}*XwG)GBBa2}75$UQco-t(xO&jAOW0j5<4%pgq#csIp;5irO85&Foo3$V5&(F(^+ z{xRRi;Ky(;MQ9sd_v6f{2vw>&!#~SNB(db!zVPuM#*V z5W;f?k1qlpJv1N@XiSR!0}+Sjxg_#jhW;lWQanl2;EGUm`sd32^f^(^EqR<#K`Pm= zaEYaXiFrnnuNjb&v;k^TZc<_JH!=JsWG>omqod3R1H04Y@2X=2$k_l#Hyt+d%REwCM zpbfPD4q=yF(CmdF(h;wH8AQUHCGc0GAmh})JyYX(k zsn^-*v4+hfDLBPg*?JsOKONb*m(U)-isf2zR(9_oawZ4|`>>0Goa{i>_DXW@aI(z)cf47k1> z&E3b-YH9TM+x2$$`$oOa$MyNLQ)rjJ{CIFBitwvaZY)xqcdlmhu5DMx@o;OGJe#oWx2MzKz?l{Q_Qz5EHyobt8b`%?jgOGO z_=aP86?OaJGg^PUgWuBL^IOIAz*#wO+TT6Wr>pt1=R8VcsI=|D7W>yDYVCE;?_gPf zMcAG1d3j159mFkuTdT)Wdv`?mO@I5tbnVC9DctY2-OKaqlJsONYF4E!4>zyXM|f&> zqiyf|7unOec&Fa>-*)RsexJ|&O4lH~?BDhN!&#lrPLtacdRrb`3to?k+MCN*;O}zR zPjBm7HNOLDk0Y&s_8z`)ysyvAIQw!-{Vce6-oIh8pP$~&FTa;B=ImBoFNZx-M|}R) z%Adkbas2E(h3&qfMVGnPwbs3ruP#_^dOBaPyYCKn+mDGH*vBUPZiSb@!nu5Y7lzl3 z%}QC_t}Az3K7Y6U#|?13p6=_!(?iViF`XOyznlH;SaElsO~T!V)8VNueV^mYk^R10 zbN0JNT}+YaaEdrEvAA6Xl~PjD!_{`G%k4#V 
zur9-2{U6r6d6^koNRp2~M4}+86eE}TT0)i2_YN98;_>Mc)(aTJ%+x&%9EtM{f|F>` z;$e%5q*Agzr65z{pzVX1)D^ftZrp|XZ-HjrCGa=Zg7 zM73fmf>qsS1dKJyFjNmM4OuBzm5Qmm|Eioffsi}I@Q@EoSVPBW5q7~tWrnJMxZ8n=^S4j?=p z>NePVH^IPmweyozjB(#^5EFj>DX)*D&jtE+!yTQbkPbF&*8&O6YZ|gc?u^(KJP_5T z*6KR%SwvuUml?3)EQqmEY^bo_lXcM^AA~*0=r|BGFBZ)hgm)}zg&yEF)%Yhx2D}av zt6xZAAZ{$NR86jO5E*4DM?_e{!h2<&vJehtHOdYnB!>&^=|NWDE{D+umfFFpSXoUg zzfa}{1+0?f1l*qLlkeY1?($k=2L3rCL}p49v~jN1)`6{SK3yH0?Cks>_<0AHvbOqP zgn9g5stNV~-J<_H!GzX9R`1N8ATfc6fyso3fDaNB@dO1z5-{}$fe0${WokJhsHebL)d)3$Z3*7LF6ackLCtG3hLTCsZLP8Qtg zd3$~Q^5xw7n04>H`{SME{4Vo3Jf?wJ(xquYQAY>&!Gj@wHNaS{K=R-wr{K+*sy+lA zAP-@6q+`E>IRa%rvsG(#xD6OwD|7f5%<>5ehei;Z#~%3ju}cqzZuikE-5TRuA!v3S z*f4}MjN{)|js%p02$3$cwbO4}JaGkG?mkD*`4tec_~*$9FV7Z8KU~4t6)`$Kj~70- zb!@d7;nTl*{kp;F?lFILoUNm`TWRS>qQdFv{Vx*$9I?=LAYh8NDlw>dM{<)hv5iwV zX!Pt16tY_4Js4279y+l6CunXUnn;ERZ}ti$m9~2e9}LXrH-L)5X{LL04$>eDIZHHk zxXSz?XovifN+LX%Jh4eFUst6l-{Bv4BqFd8N7CHkwjdu33iKu!5QtIQc`8EdgL4ty zn7Or+^Gp(B*W8&JSO@{f{x(rPvbN6+TYB4s`ICe5bvG<($n^*|+`~E!ZGbeYgQtfe zX{YC|Jg}F8De_PEP}qJ*hY!Q%3Y~p`{b{JgxK{u=Nxo4A27=D+>A!&b*te)ei5;~2 z4UTifRFo*KeMGmsq+F>`o>^RF>Dl|dL+WXz~euCbXV(;u``%X zPs%Pu2+RDBh-dx4yv)>FHuo0zi)1dAL$XUlQ4P)l;eq0y`R|Q|*W#FCl#)0~ts`Y^AHr z>t!!tNNm{z7O^86D_%kiOR)OFm*!9fh8k4jWv@*Z&eGy@IT zg2gXQ*_>&&5F*BZYQz$UkSz_;v9))qctn&({iFbEuw;^-0I~Xk5O&nfSioV)E)Zg1 z)}{jzv5_saCK9jA8j)bJCRpSxKQRF`l1frWMaj#b)E_E1iU7|POJY=UlqMDJ8f@7{ zAX4GUFy|NoV*nSdX&4O3k%4qADO3>AkpSX;VKZU|r7#i~?1v?{3wY}S{a|6HF5@kE zOeIPU3Xrl;Lu7~y_|sNm&y7~sYyidsHDsvX2CEfCl{^x{G$he6)gk$S2?;U@!@`pqu*cY_ z0n4Y#z=W8tSK^ZK%v4Ucr!u@ zTxT$;}jcMqzF$3X4|?XA;A2;m=`CAnFn4qBnlQ}G9x64`Lr};5F{&JYD{2&UyWx} z+|CP>HN%X8D_O!P=}a+$SSbO`aH>tXEP1&srIz`q&T;>xfl7gC)weNlEmufDQBL}W z;;xd47>DS1?5x!!KmP544 zZ2%B8icvR{Hu6(~%~vGG_jYf7+Bg-KBi`@)HrMIaTG z6d}tDqDHgdj5X$D(NI~40r=pO5S-CLnB-rGWIkYh7nlS5P@x?fEsbaic$$GKr<3KwSu9rn}L5Rh?5VTswlufXmT);Ca7Rx z1J(#XppI;im3)Xc=p}r%RRV~Bj8K9=1v#>#6BC5BVZ*8nYk?l90@rdR7#Oc6Oea-z ztP1hq5(Q~39K*_y$xd;}ics-NxLK}4mEe3EB9>Fd1cWm2APZ>D3Se2K;l0nnF)~AK z_(d{(`JA| 
zINlpnJieF@>_6c^6#+v54g^RPxDtRUfT)y{pMav{KiK&{hy|v4P$LsP@MZngc)+OC z8v?(8>Ea&XK!hAYf#8!Kh+w@{$rMLIZb^Y5hG&xRkdtz|tu;?+?9iO{Vx` zvP^NtEYq(K0Z=CXPWfw5ftqPzn~_F2$WM34(?q$BnqZ@bNuI^Z^P%`VdK-;{o&Zw2RZX(m&%8a=!whfZe_wOTGVU#Joy`8d-wp?z zfOB{j91PTk{$H?;7u;=hbY0A!tFQH^GSd|=zHeT~%c-hIeI7(DohBo!-^*oda=+V- zwt~x_71R&rlgel>C@kGx=GJ3-7uKdvfxCnrnz7M*VXm-)ZpZR~n<>gs*`-|baQUH)&GXV0Bf+uAZTT`X1-sJr z-u$0KzS7$0>RPXL1x9J2G=0uX&3%y=k3(747}-a&E6ZQv-{$Z>E#@f8x6so=AZSo{#W8-Gk?3A_7_usY$xNJH4&un zyWAeQ{+sS>c@flH_e*VCZ13{TE4y_c_w1`HP><#Z^nCN zyzEkcpX1nHaALLWQ$+bmw^{|0lMb3^F;ww}Rs zGmW!I&n!-FiYnXI^58Wk=JF=eSi|r9ejYCN^m%Y{jCg+sBVNh#e^`E2!VSH; zC^@}f=&kSE(d@sG^E1Am8w|~FcYVkm)-@Xo3>B4sotNt5yiaa0;j`E<&2+k74}%8~ z=i0VrvuG{-?kIc_!jYK=`J|_TqP3~ zAC|SeRW~Q|hiAt2w4T~xioD`DUzA;sf6 z_%_%L*VRSUkJ;ea<^NUv4E|;*K6|&seUbmMt!;JeTyK0ngtB*~+1+6^J6m`Cp#47a z9p-kc^LuG%a=3roT>a$@`eAOPUv|LYq!$#{BEW#@nSy@rZ$4fTen?B26{flD+0?sf7vJK>q6IG4`CxM+UsoSc^I$URtM6f;`6S(7MzEN zC1r(J?xify+Q(!u{C?Vbtp<(gcEwSKx&Jt&e?I88IcZHDYwzJ^eQsx3?tS+<0`&TC zrL4rY&Kiq<;dYmAGkGmeTKyGVx$JZaZ%>fZ@0|Q%`Lz{zXP>es8%@PS+_~9rIp2Qc z^KIvh+w^|>o-5IY?MRGs4Sf~GR<)zG{0 zRi|Of2Uj86ng+$9zQPWKGW86r3ICn%VwLKx51;B5>fLKH#p@|dvC$_+K3o|I3;-~m ze6l04RODcM9+AlMu(Y}NPr>PSmKWZ*wce83<4#)K)Y3Yz4I1%I>P5P!7S{I{A%So< zN_keDBLm~blgkK38T1(lSR!pUl-j+0B!axP8d|2SAHsEcl)HXtYefTE&5@H)R?=5g zHLI?!Xl*uB+!(E)F?LIvFq>vQ&35um9S>X7(jEzUjI`AdX`(Keo0?*Bax<GVXJQO%CO=xaV000bd007MYmwxpPjqJ>f^^NUq9V~53o&I+a ztE=j7gRF|Rq+&A8ZT ziNR)$Y06wXdC9hAoMOvaoT;-go&t?JQJV!SieIc=gHW}g6jl0*4U=V>TRqAy)p}B` zwHTJSGP<8()zwgi+1X}-RT^Zrk%k`q7@|Z`ofng(ItiLxFSkb1VN#V=wN)3@UlJR0 zxtX=J$hnSt^Lh~$w=_V=;yZmuqhA}-Fw=NBNd+ox+N80PS~QtN0~y$kH0f{=mWX9O zPGNg&Ipbgs+f=bm$8CI;LzM0cf)X-|9X`j65VEpA^1u)N326Eg6 zT?{!S{ovw|DG}L1%oFqjgDTUG6)5a*jqR`6jDwArEUXB;2{z~ zzzn8P0qpu(D1pI39~ln^b-fW)Nrx;raV^3OEDlE$Q6M^aiEc;6^^62p( zFn%FSe=A&}L?0{!Z8fGT=5P}uzO5wLwi#8WTmjO5vv2+Z$e6RAMrObx6AnHc2jcD? 
z6zLu^gxnIxL@ zF=w!U2N(|qEtu>JIM|tm>2xdr!*y1Vh$o~0BG}0%I=92!kjAk+fRM`vQ*8ZU7+;g& zZI_z^cTa8NhR?hf&AfZ}pkFh)H#V@5@3KbPr*^hmYz&%5mK__6HQS@kcq)5q*Vle- z;$SK1E{3(sw?<%lCD=Mb&HR^kbp$1YHZS>t#B%rQB+7_ite)|77rL zkFD&bC!Fv1J~8*{Bif!cKZ~#7d$>qdy_q<@?srqwyTa~o5nPV)Jd_>ol{LR7J6KuV zEU%~c?XJUpQ}2pDX{WR5VEvWc?e5Fi?)_tewa*o1=9Qi;v;I!LiTU*5`#+Pizf(T{ z>sYOF#Q#yInQh)x(4aRb%CxNU7i&%D@8dXWW3L{@|HX6X1AonTzU`y`__e-M$6Ail6qx_r~aA+W94TFeg?kn=i={hWuknbAd$=!-l+ZVTy`kv4$DPL zwAZ!&f_MYIMJ|3~8%x~W%VvvY?`#A}(a8m@A5yx$`2~w@ z-9uk#?vT-bWDcRP-0M_J#KAm3pH+4!rPisTB4aCZAXeVfXZ#GXxQr#2A|(wRPgWj% z5|b6U+Y~o{CaccO2_fE;Dd1`+1?M_lJo)O#Qvp4BbE{OtJDkQ<65h>ahpi$0%{C=j z7>-!A^J3?2y3SLvmLEv+NIg4?|baFeN*kQ%L+$*C%MsP@!lWcf4bJ&<-8yc|0ym4 z1OUMQYNYtzuC@MuuC>0ai=~b8|0A}#HYlnnz3z85?p$5M_U>q#i}|=*C<$U*H$bHj zj-6AtB_$|E+DF1iZf@pM<^Kg9nHy-ags~)I$EAqE5+Wo7k)sw_SjhMc1DDau;4&(Q z`oaoE9>iEiKJy-TE>`MnAFp}dvmJZxr_(*o%wzc%paG`I7X!w@b*exBasU8sEbC%& zO-)**NwuA33kNb|+2>`rdeEM?^O8d|r)Ms1lQdYSwI&SAX_{~gnhVnn&Sr{p)~&Pc zZYvOSmZD=C}Au9h0M#SlgXtk zMc8$PLa9~J)uu-(Bja6Rd9JWs$IDA#(^ex@iuw(-NmuQ1?Iss1MTx9a%4)!|YtzbU zb^rlz$#};9Ua<++MG?X{5N<#eVgL|;2qKmMCWnIykp?sr0}BYqNDdLu;A4sfNhG|- zNNJ)m5i9Gfg;1dHLC^%V6dA2dgfz+`N41I`G!9FgP!}4IBIpb`?jEA8zoj@lf&}$k z2oZj8ho}+vMTHMbiY84eC?phAKpTj_92gN!iZFIdP$CQ&_PFjLaE0QtP(HLD__}jN z1SE)PT=|hW1QU67yIN*3)Q+1`1T4cE^9X95F!*?)2&6X$YzGo-YYtA0LvrE~-T0^; zflP|6b&8f}#&|#l4M{?+g#xW|Vl^u%+-(1O=MSf#4_xe zRP2jM{)=cyt)&>q$`C2YoLv=E0{~7+-*@;+~-hN1a7LrI(}h@S*2?J0XYZ^ z2+Qc(8I`1h7y{sq2QUp_^?mIoB7X?k=JBS$pKt9awpaFpeeHeACl4;D-Cp^Rjo+L* zP$jvqm@IrV@R_pyHywf_h>pK^sV$J-?&YTzWTv-g1pN6b`qORr4)mFGsCVYmRiIBC zFrS~nrn5QBU9#d~ne?by&`xu7U@I1RPY2dNj7Z1qI$T~Dp4*GrJU&FF|-^I5@&ht*TpR_#6#c>w_ z@aN(?*+~o)KhN@afj_Umo=E8Yww-o2FEcV$l{_NG(J6Bazws)MbTHTDi-0$~%o=6d zRF*H$ECq8_T_;d90OR9nrWb)G03kC@{hG@#IJUY(OA(_^xc1}s`TNA{_4x)DK$ho; zo*pNCdm`$)WN@Uf)#P<^<>HOP#OVhhhRiS&B+8_V`3Sj@$#s9xTJKHh-&yv_ybJQbXT#{ckjc;+|B%quJ#l#Vr~wloL=rr z4jbSd4ZuLg(NffXs?+>zDs1UigZ5G?kx+G`)>QSFw%Qb{(^~8}>UM$L zR9rDn1uozrvh4z8M8DbnKW*kus(fLZ~wnSx`Ve6q5Q~uf;i}4U9~#Q 
zYB_!*PT6vpbKu^1*%g;vZYRxd&B$-ZwG{=8a;-vEcctLZvVHpVX@&6orGR#LmjTn9_=eDrjO1g(2-1;SHE%-Naw;^Smm_*5GX%wkZmGKG2iCQT`+}d8>OLE#Edi!| z#r`z2b%8^AC-@2ADfUW1&Y`S{MqzWvoy2=UN-L_Q?|3S)AY~NP<^V^Qu!;#< zVB?_En1&r!og&Dpk9#itr%=~WejBD70|8hfAk#3$Aq>(Ua#-H@+$#FBsGEPYKM(x~ zfrx~-_XMXf%uwPF%1~v*9A`Q4UjwbkcX^l7oQtE%ETtYCYkqWdoQNaN2EnCh)Mcb@ zXGljs-|Wdz10C%F=pGB$hH-N>xgNHZy*+QA$p_DxX}jY`T0?`Q1rrB+Rrr)SJ_;W1 zFJO<@Xxf{@>yN$scO5J&Z!{mDY@T!t{NA4<;~Q_c_l?;>@ybE|ua1sRuFu{5)8WAH z&0N$-U-a=A{q0WQ3q{WxD&_PWHhn>&_g;=F|9N$O=fecS`{zTZB|sWZao3sM&W;v% zaql}GDUrX==3B?;ih8$Vt6ee>cV~D*+coR#J*7@zrORYYO z3rV(Gi;5*Qno&oo6j6ys;MkS4R_&#d4-~oY9G|!6;86uZz{F4q1eE)$0^9g<`T~I{ zu=8LP^Jo$=&-VMBKj0O{`sN zjjJcFOH;O+plkXp(6KLZX>~^R#035$PQ2(Hr=t2as~bwJJk^I(q}6KEsRRLC*0w`B z)Mr$lYg4K>M&dgSS2MPmb2Xxu&BLq3eenY>mXRhTqdaNbijTB8!#t*X%6W-GnRFsd!q9bUUz_gx8b z;=MHy;GRN-xx>@nn_(s^Qmu21z@%H1r@Y<8UF!_kWh%_|9!iwT3)xe6A*Dl~_7Y6e?a@dF*Q z(=n-7EsVxB(e+f;3-*}EjtL-DW-S1}P#;7AON-=N&+D~k^lSabGmkdl&c0LG360TZ z1R%Y@yv0zpeYORr1$2rpq2ZMkp)gV}5VGefH3}l!SQy#9Mj~eQgCBc-Uke7t~y#Gk;J_eyC zqUtf$l$|TDdQ>96MJ|4OJh)Rzb&;T@5oARw@JcB#Rjw2$bw&DPdMOF97A%wIDTW2) zu@7hv_X?!&ji$8;0~bZ2E+MlP1<4T}=tM5(qVU5k(0G7XD=B122(7{N6?Tj|#34Z2 zyiBxNgEpyUk3AN~!xxc=E+_;Wy|xhW1KR{;Q&tf6WeonXua^y+vEePkRTXc6lm&F* z<_v;P6Y~PtGs!DcF~}%$>lpq48&FLi$9QR{5F7BDL^!!iFyyVmY)79W+=5zsI?R#W zBA(!>#cg{dMLOZH!v)*^!R&?$EoX>ROfUpJX7w!D?hCg1{_F5HK)ORb0aQiSx-mi! 
z5L(b2!x{4y^^}($D#3&r`%I&_D^*+^#{4KmuB5n=KI8+rfxv`mgn1PPBUZ>w8y1F$ zXm%0qqSzmllT8Wten51A8JkCj81t$Od!gYYnfwGo8V6Ex1s)s60bEIAiYlHI+q zZmsOU6F&vQInotO5$~JF4l#bn2oBy3aFD=L#@nPc1jLL46;WoS5amP~3Rj-gVCN2% z$yIRhgYDD+TB{dWLo|)0U6Vz z<0^{gYKF7MQjE)wz~GqRBHy^qnUKsuIGHf}+;5{NwjDD0zsNer;7+23%g4^dnD8Gb z6Wg|J+qP}n$;7s8+qP|MXZLw)ci-Kr{?K(lb=~T^-S^z{J0~8q|GW&_h*w{xQnHh&bdkA!U^O4o-s1sjKZtPozY?F&izzF!t#kl{hkKV9N@=MU!Ve zv~SLd*Ylr0H4$iv9!tb@Q8b@x0?h9Srz04!V5G(c+<%xtj*l=$y_{-Q-0Ky1k@}Yg zuCOr4yZ0R@1iGVx!ew~vrVoi18oT6gkL=_wI4T2EtG-=}9+Plj#-{NuDk#Me1V*t0 zYYv9-xQJd*(CcRO<7#t7%N8gSKt$&P^Q1x+3(IE z0W2Xy*5AxzNm8$F(TyUgj%hZotx1EClll-@lK~AStFJ^H(s4k5*dVkSb>knelnUqP zZ%8>XF_8O*A4%Z9fD=wXQ~=l_G%Cowz31ecHeUz(4mj$u7lIsZ9265!V@dSRgf(VQ zjdXYJgZy7m>pBCiX$MT(M6y<|&=>p60&A8#mQ2GaSzYsG8TY9~E#47#|BbpYhLQe} zoo|+&?jn)sH*tqGs*y8_YtZltU!VAk^xOk389p{gMxXKf)ooqSud8tcnVv_^qZp(% z8(meN4(`uM&WD+)ZxWV=y6dG_xGp=V!gjj2F89N=NS7twhVL5J*rOoWfZI$*+gPvr zl8T*IJ@=5Lq}9yzwGYlys2TeD=@81`DiU>V#X9K>lfh`IjvTK%9KiHjQ>z28DZiW8EY`*(Fc4@oM zm)!l!hZXmJty;xV?+-%_;bdFqnwgZD6a369`dOvm@(L!EBYRLae+Elfbmi1Sy z47RDR%=mtof)3<%eI(<5dpj~VX;J#i*sM!V@|dk~PgJ)_A(F$izFnm`VlP>2w~fPr zxC{i=?9~I(!7GM|^Oey9RIID!$uorW3gjs>Zu@M|SrJbog!$E9&+p5IHqY(qmx7lfiu*yga-T z#)xOiGqC+Zm`*(1b@5KV-eSB;)AcSYTBrueST4)i zG&&p@<|4#R6`g=!%{cn6Drr;W0z+dq!Gg;YZ1miPHyFMEejjXZFQ;z5s9mw<{`%ns z`@_`AO;%RU&{8ClUvchCN$NsY5s)3xtc_#mQ|krUYC#&du~mvKD>3x6#u9e{L3kt7 ziHJZwS4D7?;mi)RB04Ix3p9LsxcmNGDUr}MZu@UGzO0g@RTGR_XftdH%-!B#{Y#Dg|1uy650ufa%yn*{{+Sb8T zM{R5bN6`gO#QJOF{8v7r<0WVgOll`yL{q6}{34>gam?kV&}ksdz)j`8gnu{3NOlq) z5(bEXv}GP0afM8;;zeKH zoh~-bT$YrWipqa2SzXbnMpj^EAdOXx85wzMdg?wqMZ1$`MD)2aq`?g*EI20YXSbp8 z?zfb9SkK&I=~P(J8G^sq!^MVjrtIrSo#ntWUL#B_y{F=?cmP-&5$Lu|+OlqR|NC|w z;0?9#@lQ~d`{w~h`akcGt&NGf>HktwtyQG0u!T``?yN4O6RlbBSgp+#Y}##5R>V7F z6g41~*NIzg)|sN`LqH+3nz+ST)NeF8E);B%5tB>V9uqVvpMW5Vt7lI7X}Hes-~%Za1_D(h5XS=)q@3KegVxW zF7!JupJ^>Ku11EY#!O&btM0mH~5tsK_QLn>Sf;F(s2?<&6vvC4icG}p#pbd}>) zfn{2(ntk2`vo*PHG81-lY1x9%Y|vg7Tjzhmda%^#Mcx;mmdTA?yf;pWpD$m539L5D 
z51IkOsLd%}G2?_w(S3Ml^N`)-{q>Pe8^!rFsR{0eb>PVe!3BHZs1ry)EYC)X=Ij%x zS96b9QMX7R)T9%HjF@rG9;lK+S%}O?Ax`iwXrW?t)3lUyEJ(h4%j2U-?puudNek#UK@TsaF_x~aDsX5mP;^ts%H~P zVzKmU0XxWmSO$ZELtlG3Y5m-Mt(2?V5tAIQ-{f}!1CSCZQt~gQp)7{Kq6!gC@*Vz1 z{;AxMe@zinc-0gj@=Wb+?gMK(>%d0Ve}kl)loW5Nq%JyYAh8Ji!3ei8DOCree(^KD zCAp#O^~7w%8K)2=W_QwHm+~w)V18rYi8TReXE=xr-S62Oo0Y61bbPf@^d-+u6+$$| z0@E2OWV`Yi%(+D!5!ETdx5nP%{Rx*4@YGOXO4E0HIX+C%e-AE8Op>UpYFLx5xkP1q?WGumdD;sLz1hLlmHX*xr}W ze}i(9l6HE9!87v$1$QDH$?<{!1NWl47RXCXx5--{`cv8q7l@03d41vOAsXb1Ox052 z`pN54>BXJ#f|mxuLVAACFPiVG^g0quP`5qpV<#ONJmZW2qwUoUO4x&M#doMkNNUid z(60)H9_bw$-eds#4+HN8UvBj&>27ZyS5aMx7PXXZz@X8_^mMqJbH(UU7u(Dj(e-{) zEXSnM71w=r?D-+C=Rwc6+KY(5WV)ZA9%O4VrOO&$Binh4YT_UD;cTRs)T<^v4FTj`&4`iD2{n zIq~9Kp{(>zOn%7BbbPqRTC@vFW#a+H=XlkD;6kzmkh~wN2D&tuzunmJZCZxHZ|n#l zSFF*@=y}4DShZX?VpsaR^i*PlSwurfOjl3RcItF-_?C4BzmrA_^rcauRt!@VC~-A` zChVkjC}it$xw^wXRc_?DygT*PS`3>MhWpy`FQT7kWUz75k2xxsv95`9LZtqPQkqbx zq%K)MiGYCCT&=93bENOwW>EDEZAx?Ezdm_Ap6IQh_(qfN{!EZcaVo+J__Y4~KSyt+ zIo2Hl{}6LTG$0^^|LaTP@E_LvemJh>enf!DMC^N+-_WcOwiaK9dJ6kZe8qE!IQrt~0Wh>+ zwCpJqTEN!}$+cc!$4FFJ`4GMi;+#z0h#AjtWqQK~+#gJQZeC1G!n4_U7y(chM|YE} zb@VMdXkZSKh+vGXjS(gda^Xe+{5m&7;dU*92 zMMHXB`pnWR)hk)oE~hD$s#!pXv`I8co+^Ol<12851{$%LBMzR9ZSaqZ;tN-d%qQ3_ zN_K$0-gdPbTF0ufr*09fNdsvf^=_jeMER3Lg#o({ty9aInbKuKXs@KjQjVJGGR@vL z$jjr9L7&LNzBq9Qi|!4}#8H59UAfBCmCYhl92`6!p@KRgJS> zWkN^42kQyUSx5mhP|<>NQ@L;jv=_HNGj2u4T6tA_X3=~^ceA`X^=gG=#lK{EE0+piY3PuRG?U7f6vJ=MVBdrygH?z zDJ>HN3+f9eiwi9?2^(EPKvR~dWiAe}>%KY*TX`|y(CZLGjz)G>zP6>G3{8~3;&ccr z-K`TN)qM23g55RY@k2p{=;ob(-%;zn3JP~}OQF@XltjfFdGPBI|ADrp%daPg76!!% ztnIU70vuw7g9WjR6#C8TiD!%dA%gz*Cr^PIkEzTpiu?f@{wE_&{C!c5DalAZ7_4z( zUps}==V*mpUL}E-)Oux$Nl1t-{?9l;%F<}#eHMAB_uj4v>G=8jL=?%q)C^oEV#P;})|*v6V;%Fvd2ql)U=gub`qWN_cSgM6R5}r;1-z!P2Q~U8 zOI8rjcDP{Z`w89>%z#=NKjF+tq0Y>`?o1<*P^ot_lr+hLctR@@RfNWmq6q_=4N$(c z+|eN&Ny~NJo0LXkCe5Viw?{ut9t}eQ$t6-12@KQ<1)a%@+yhIiz?BpH#!9GrLhsT| zYB4}NL|QOrPC6;g$15j(RDrQXxt$EZ|596(uieWNe4~{3MVTq=)aaL8s5buqPP~!e 
z#Hdd}r3;T_0~us5o%9i6o)m?|O)t=L@o(br0;4ZLey=2(VN;>bP9v3resFTE?c|}O z4W!MAv1BX_K!Ax5wFOt96Zn*J2qqPXw`bR0I;Oco9(x1bU;B4*!oMTpedg()UzT(P zKzPd~xzeM(OZ*DI545JH8b@$v;%mK$HD;82%We&b{8j-x<()sesrGfWM)hG?EmKILr zDo*I7wlPLI-YQj=9QtX>OeE`XO~sN?J}-X`t%7m*WCDef%uE;CXA zX!tIa;sRX1?dR(O7}Iaf#05~e_a6sr=ZrY1AU#f?$`!qgaqQi8_*Ds;pcB7n;LW*h zT!pS|$E)m$j%|%K=CArP`($ z-5E#G(dF}aPHwnCYyyM(sw4*y!U+2Nh`IRy^g=vf{ILP%PH8cTcp>!kfQ7utz3LLb z1jE_=v5r;04AWZJ_VCB6LHy?=y=pcJ81c&&H%BqRZrr6puagbW<*XPHY>*?Fe&A`6@yU5WQwSt5Q@PUfOy-H6m<#y+k7R%^;zy1JBDW#FYOEo= z{%FKL$OYNpHF|I-TwzxzMrNs4V2w1B2D&L6p$j)NQ|FX~I5Fg46QRZhKbQ6fpyG`N zEK|}4nzIE#3z%CR$@u~=F=<02oX|l=Ir%m`Vf{XJv`B@-mcA}aIT1!TW@MR+k=_%q z*PMWv-^Yp_Oc@wQ>W#gnV+aePVzGIBh%s)JA{#C4Shf#LJh`&yXB6^0;Vr+l({cRCIQAz)G!&J!ySOBciBVh{yjlEd8VEhYSfUv z1}QhzCk>&3XREO50y1=ty0;V>eU;F9Y&kMABulT3i~@?iCgK^DS1x`r>@A-`Z)!|x z#+*K1dHag}s+!uC+%!@YO!mreX996*xF)DPJV?1`x+*W^#b1Fc@y~zeyOjzj@;etp z3lm6p9jS(pFM)m2$6 zszL$=4w{a*@h*stGe=H>em(0d(6`JWcD`4GMm+A@%k-Tzt)rK8Tqc1fOae@Q+4}en z@*&;=lxHZ{uN?(U1q@mUES`Va4qF0oaAl9^c4v#gMwOp%vm4I{bj6eN@q>@WWpHiK z(HeMd{K*=CwW7NJw>nUW^O>jDIqF9*v^(&SWR zT}iak_A8ftkrCt0Czv9R=9}WaiNfdCW<$)zibG!y9*r5otyk#>sYVmmOy~~mXT$Uh zv^MKaWp2H8WyRMnjn3zJBH6cd0vx&T*W0egPr51XHggVl*Z0E=T-WPl@+GV7i^{K6 z)gQ}Tj@QzSpLExs=U!P}H4Yz|&pq4iPLDF*`=V6SZO5MHdC@vwx1qK#{DI5VomQ97 z&d0~t?w^MtRkDK`UCV@vc*~WaJH5X*u)1HTwBOh38@!)ysr5B6c-+tT;T?6Zw%_y& z%O75!B|D#HJ1IA_E8FBhM(3a5x>?`%y5BZC`^^~JpG(D4x#47%sV+PZp(V>B?Gl1F zJy}1QH{Y4=Pi2rz;|Se#UsIV2HDWzYKWD*Ry7=$QA-)K?n{UsmEIDUCY8~!-OS+vO zTY#*)RrokxYac%jC+Eq5ePl31k&LpwAC9KHuLq@A zp5C9=4L6^cygNUo?VtTq%Q?Zc-H%PK#XN2|H9G7bXZ@Z9qAuBQC)PiPKF04ip4szD zv|J=nWivbeu;r=u))rk@^z1VyyhCpXS}pw3_nsM$?nxVw0LIOjCWnX%_v%P5?~n`j zI23@74iTA~tkga8A|pW5Jv%x8=a}Pp1}rwv+d1rF)$<=BbUDL~13I0>w(`Mez(u>f zY;s)_(`JLhr=<4fy8-2t5^;PETxvJjo zW^0$wzsW%zDkhhUA(`ed7H5JA_5vc>AK1_iAW!#jUS%?-f$s7| zyIi62Q?IO9|B-D{w&S_xDEpvedtH~2Orl?Ty4!)cC;Q)pInv%#b&Y>)elOyGg1!FZ zbTs@=JhZhctP|=HTF#oUi)+16|K;>?b{s)RP;Y|KJ%HFh2zty!%ndzY9ZTz(eod;r zBXN2M%cRIc)7xmX2$MD*&*G^9SC 
z{lw(vXU5y>r)%fSwZwEO!y`a1mKxfn`gC!Ln1A(}3@Sf%`A|#^R@hln>7-HWPvL|) zcOhJe2s1l%iPOp7gg6w5!Ms92df^6y5ES$D`KBl-YZ%KioCWT@VWco=+&tQ0JDa4Y zu#6PPDliKRx$uttdb|j-MKQL$xF#Vsv=(p>F&XPD6Afc0lKAGLP=|czHMole#j_|< z>^5U*Mdl8k!m`XP3TH*~DofOmll4N1cAS0-87Hxr7(G!)E@cIKF#OW4YTTxlrVZ33 z`cjLrTA7m2Ui?ZRUp5oB`LWwDoCs1wnJD^p3Xy{?B{mAhf${{05F&F!BxOT}sKG>a`PVR>}gOJ?O6uVtACP)|3Dei0Az^? zMjUPpi2^L;q|;jGpVM&02Cw$Q%FnEnq=^s72WJJ(S^K%n3za`2?d+leU1i0Y5& z>rYi+h$3LuOUCY`2rH#o=|6wsz7HGQ}HXS^GxUx%aJT#ENIZtn~~*zn4g@Vn4fmG(DgJ+?SZZ!k}YxL?Wih; z{Qqjp*#y|}fc=q7^ECui@Np4aJ}BK2(>m}V+2;!ZLJ^6-B^2Ksdl?z4=%o8a5JyT>7RIH?k`?B$hdPDYy1Z)n0r$<8LWbg6Xt(pMQ55%^m zV|NvR>mfAPV7^;%N_v<3a|hrW489jy<=L!T@ezfWXpl~bfm<%j|rniJV z2=sLqNLU349u{FYZS3aSur@pn%X-fFr%drn;b`&gd@a;VFAvhK%@=B2d3Qmrgs5#OYuX?M(BkVIz-8= z%{;wuVdYPe@M`q|qp;Y+EM5-Fdj{2f*PjDPCl5Gl6>C{qmT!;m;EW8;FEYhlMwo?1 zQ>|H~Ls5fM_2b_smI#0|!El>gNuLddI2ACafz|#y2t`*E?+D|R*E>0!oE=Sh5lN>^ zd=-h358*0BBt!$M;LLA#V$oh2d{-bd_?D5T&XY2tAApKS-JXS_Jq2+^z8U`Es-D^1=ObQJ}hdX8osE$LgXD;gKN2T%~a{YV6B7*KgynQT+c_M=O zN8#U?CGemI+y}KgSLE3~a$a@zF`$_+)oe4{3GF!~pfP|abD*uhO+N-!rW=Y(HwbpV zm>yQV{YrtXyp8^w)0LoWHi~y7q6bRQ0#ZzJS6!5bKh3ROiYt;HuOd8hN@j8>MvpQ; zmetE80S4F%eG+^6i3rkS3{!QA_V>RAk5a%$gBscwBf9fl zWAd-WF990R0{*M~VsSW#7*J{`?tle;tL2xDjd^y&nj*s;kO_Aaiy(Tc(eVw7HN&o{dHbwj^(XV0a@kvkFN5vvN)A+m%-yC|Q3#W$ zyp8*ddZ*77^Eb@-L~;EOhKcHtbp(y(po4*O(Ns!~)z#gX-x=*LwhmWo%35R_^yrnx zk=+ioFE{HVD?RawPm{s$Ej7nnMvn!LD7GXsTzBi4zVB|svy_=XD)dby$5JD4EM0e( z@X@;8j(x}5oD(KbF}gjqF9#PqYEIo3Z?rA=Zo6Y4WeYz~_*S&PE-!j)qnj^^JXfrE zy{}IP-JiIP2Ew75@#eDD7}|6*?0uXth2!Mt4|kHv$JVRbuF=k zYuPD>hRfo$J1)~c?J~b$?a_7Gfc1S{$$L(`x%oIA1IUmfiQZOm6c`4dL$oA2P?F!}XwEcRIjvg>Rg`4y8ejGm8=21n3m%5~(-hAAb z?&&!%?rrxsM)19#SYP!+M(A0{DD%qG3)@fX1^pTi*t!Tn=vWJ%4P8yhj8{CS}j<@;njt?m? 
z@!a_S?Rc+r7^|#rc%@xPpzt+uHYi+4bDJmt6z#Awi}E ziQ*leFPqzDd+v7Edv(4?dDAB6SiO(!d(u9|tE1_*p}Vd_Rq9>aaQ%uAY#Af{lcrWJ zLjMwjbnIQXxUYjBW^^at9+fCbrQza7)oi6v$-IoskFM@f^)zWR$daQbjqokS(bgdgnlQxy?Rf*gWaDx)QTR&TB;;UID#a^miqODVJ24<;b+=M za_TU91s=imsTg8!ZzY5~_$vFnw7Rs6Y1?iPYS;}s1f#`>p;9{(9>+o?bfpg|Ac05% z<|b|&YEMaFPKcCrBuui#RS+s?$RNmatbO}vEAM+7tq;k-BT!D4mT8eSB^l7mUW4y)VkL*shaqRKoo3yeLBcm zpVSKBB*+oo&z!kH_y{S#+(t}#+?mM1Laj{Fa*pHR>L&q$wx+2*1#=ifj=1W8^!xwu zj!qYex$XYrj6Mnf?=uf0W4r&5MxAP&UMR}lGmlam1XmP^GJlg!lEh(=H9|;(i@CB$ zr33F_ti6GPS)_|eSfm9K(B*Wx=aet^LAAi;mC$to2>842r(4EH)$e!v`{f<&Jk=BT z_Pj7W%Og5Z-#(0qEW#=^Kbt;0C632gI|m=0IgDkxt8`07SU+tWnw&i>Xcr9WtAeD7 z6vi^8*8W10V$oz%cR^UPg8>!{V|$*GK>N_t>!v&1sL~l4 zk4wYF0PFmGmvPr-V&Hie6%?5 z_wa=PVgd~X=iE|)8GeeKs!J%a7Q=EjR5`UpBWUL&s38E)c1{m$ozQ|N%|AcnT)|Ej z#efBBj`>w+<#4}{?Dv#JDe)ZcRP=B=NZD?L)fI2YpSUoG^Yd0LsWO6k!nnh^eLJdk zkYx*E)ZoXSyATv6=CJL}Y^j1Nm=OnnvtJAaK2}6kgouHET%9OKKSjc2xJlJ1z&e%8 zVIA4u%nn=}oTE`MKPJnr68v)9Iu=ZUf;j_LWYr?I-UWKqi*%rgyY+V}|86nE4JF5oXD zs-sG^2oF2}^lNC;v@@0lpUNhpfD5^iI<-WKI&~tn7%8ZLqDfF1HPzJUJ82wl9H;dD z`SyJOK1zcgsu-ki@!s+3v~pl#VTdJc4|0N|smbfFihe;CCrl*KIAq48`eGo9lZ9jZ z2eFcjEx{fb6E(tO(ioE!tdx7=cMJ3|Bi%SAPla9QXc#zQ4Pr$C33)QHDiei}TxAfN zdHg&nxjJ&ET4ftU?1k!H`_vH$vku`s;SDM2vb0-8DmX{3xLk9b)^8Rm9ZH(pfT40) z3Mzef4vjOEq=V8y9bqqnP;vcWEqN28)otPozkP9?;|t+F(_?QTB(Xeky(IRaJ5#g_ zYQhDL-pt8034%ITlm_LyvBEfvv{nH%wTYvzPOEYZXoiN?I;7T+NGaPGx3D$n2g3L@ z)!qj)#Z0=@e3ZgtFr+@zzBf*O`iG3&6Ey#;UEl-KZ7@2tTmiQ3rNK@Q0{v}p_aQ<@ zjvXf%_ariAIypkDZ?-5=du+Xke>G5Q!*_aEroiF_LLrn~nn~`Bq_Ao)y>7dqmmqdA zz%rIRn8IqyLZW{L^@XBIA_BQ#7wLeQ;rP?TY|yA~PBB>F=HMXH4f##t6Jrn&<7a3o zlFh}EgHTvAFm1pXfF%?;Nf~S>5y7_IX44N?O`1XvSY}O6@=^mL1PESl>(4(SYGU;rUZI=p4O{6S`{vAz3fY+(q&hW(ubKL)K5 z>o_kzSSu~$n#5Pyb{AnwB=qeLi0cg)yKNj(B4mV0j!3F7D|2T8XD^DVQ5}yz`!^?n zP^S&hqyID|q4f3mz8JU@-~r+g&KdEmfXMYCEKCC)+71#s|3T1JRp%Cdkp9h5Xcby$ zF)15lhd;>by*=Z0&lVi~rD!;K=`t?2Ro@4mA&+>IYU%VF@#drd0SfHNVU`S_Gy3C?pD;qsA&iuP2FE zcI7^$OZp2RNd4jGHyNvM`MjtZ1xY 
z5ou_xQ5;E~H@jMmz#?sM4G8p;+R&k+G_lHm&$R)Nh(^Nc43kC(0z>WqZ`?$;>7=H8S znJiq~hB>!W#J7lgY<3?c6C==NmX~}GLof;|0LsM!_^zw`b(`+!jyz~Nb7GmVjJM;FHgxR_L{ERO z@*w1a)1A#2S7+bOEt}42T~Q#td*N$XK>3wTMgoK1DtqLg0ROh zN~NXN+tRc?T%{|#$F9rZzf~x zU<8U5UvE&5TL_``JOn>`j55DHDWT z!EaFRYMmsnV-lYHq5u~%^tQURpk}I3-d*gu8CJe2LT1Pyxd-bXd_aURh)`VYN7E5$ zZi!9^$K8N7e}I9Reyd#frKlgC*akj`)e^7)v?YMqnTSzuy-}0Pm!{AS7%U5}ZLP?5PuqV<;#(&=0^jmGrf zO1%Lce1b~SqG-Qro~nlvvn~#JJ~nndpi+UR^+#qwEPc#0#_B6Znv-dncsC;(&<>Fv z$k?l^^O&f$pa#_h1th!g#!w8?Z-mD~6w3yJJ7Lxy2YS5FhCnk-aFqs*g2fon;K_-5ObfN)2` zDvm`NThqrarDao|oAIN(vwf5F>DBf6^?lew8HzbjL{4jv7pMqD3A*d((VjKdh~0Bmm#>4)F>x(3JE9`tv>citNJ~omu5A& zw8uOGuW(%LgXbz|K^Yl?ysrzh0@cH(x1_>@EGMT7^4J6z^=azHM^;0&KczcefIH$M z0*7_%+NglIRI9!}Ms4~f2kG8h*LoC5u^J#*ffZn%DeK)X>Af=V`dR+I{xeID)U#O~ zGUm?K20@x==RhrW$=3D@9+-b+;*kT@2^UI3h}GW@n{PH6@Q4A|La}265;C@mu)7gU zIbusnIidc@zsdfuV9(2*SI;+uEFG2b!$Zv?m%(G6STiv`Rz6T*hk z?}Nl|17z36xOM6ckrwz%ed$L(k~Lx@{=kK+$JYOQcEYX&XpE@}bhiehYgZNa^mHlf zeZnh|z^I+Bwx!6oiNo+|`CD0a_}n^;4{-MUh`>_*>|DOHegh=9?r&D102RtZM!(6T z<)6*@q)U$;mKD9{10G4KcUNuU!8&&GrjftpAob-%LG||7Fg)V>a(;8f93Gf zTZ84&uK5AfuWNj}k|ouE=QRHze|znJlcO6rN#k~1R*Zn6y(jT&`XIt>~WI*Evdw0}}-ota5*DV+q<&#By}EyzC`c#7cbpfMo81P!{NOWz%ZXK80EKw|0@(TTK7WC(zT^gX5ukh=q(?kotm6M` z6p(30P{sq}wx4wz6-E#cWKZjE9>|k0YK@_c1A)AI-#!l|mF)b*Hb-(rqZtUz7Oj-?n*`pN$sP|e1b{u6R0gh>@;g{eNwg#jU#IZ1aP{B?C$}o@FwtI$&o! znrl93+LxA1&&z9AjrpK~X(M>dh55kfL_J!2WDxkB0Eac`RaO_n6v=o?I~CVJWw@O| zk6<(l$>=q2zsx0#KYgoQQkN4H-|u{55!FQT%MdEcCGc6sFveF?KZ^dfl1{{Ul!oOP z^MlYZhMj&htw1W0euh40z4RuA<+pv`OHkfR7{%AiCCUtDtKC=J9g}gL_0&T@#V!dm zpx*Fl>w-oD3q@-=XXXC&UV3GlzNOSv`U2Xr=oRGcq2yl%T~;(#i($e`a_V*G*qqyt zv8s``8pF{B!uQj0j9s9?%k}$i!&R>L7T579c0=IL>v4c#>{Peu=svcJhv~J)#Q~$` z_P5bV>PYG(h2ggM@ww_~!eJl@Ejsl-_;+psVdv|+dp{snfK{6E`yrBcQrqiZTXia1 z6uVy5#=Y_2=w&-oqxLbl=7R=ad*xPH)JQG(%-LkS($-O@eP3

+&u(RRy}Gr;)|S z*La|;Ojdu61rpMUeE4U!m6!)RL zg!E>{^Qf&SBvt!6cdFp?@enuHEnm9krnQ*ydH9mMBsCj4+e*#p@O1d}v-YJ4##~c= zr(|fTfampM?1CkBv!~4==5~s5hQ6iJ`zE)>^Ly1>pzCdTsoR!?R{d)e8?t+|R6b>ti$0FrsIJMUX?Kne%Pvz?0M(lf!*%Gp}dt?eh!eWsBKU zVQX~f$9wzObB5|Yx87K_dx+a>Kl?b9xBdwSN?XC@bCUn1y+&7iv{E(y@N9p(B8F8g zbi3iR%!l7rtNXDm!eH4&@#dMksHe))R59x9ySI6<{Hgj9<{nKRtLPaU!S(B_E|WDX zjeF{=_XeFsdIwY6_tQ~T%cEu1n(`(G2wcW#TP3$DZ0Rd*3S z{+r9r^gPi|>l4DrHfOi>`w6xMYh{P4R^PL=tx+rum-orz^=DwmCcLs_jP}p{{tlJb z;jyR==g;|#j5KCCS!1lxpO9s*&+jHzO;Na=??cE)QR^Lt(u=B_nz#MM6tnVu?YDl8o3r4#T}K{w2YH$@rZxEW9-DbT{~WzQKR?3p z(q<_9m?308D$!F_0Oi0ic+5oUc|;2Yl4T3Q3gOJ*!2R~jbe4|H?C6Hf*FXi~IS<4< zH$*T>0e*x4yh2EX{iVRakO&s#gity{U>8MlJ>j{nx4jKFojVL(mObA(n@tVc>e~N= z(0vDovkj~J+qSj^`)@85R4?~E`@diNiaV+JnePm|2dpJg0_A3%(su<7HV)b`t}oIo50cPkQ<>&t4>a5Ye==)QA7s4D+Vj6 znDEcks#C-H(%>o(~uWjtVvX_j^^{w1J{);zrRzziqpiE?d|;3J#i7BsftduiZll-% zpZ7GQWSxJ-QMEDaS9X=w9>JRr*IxcRSR6$&Q(m57ccK{(Y=#M2F5e z2Xt-YMpVLl45g0vkhRMUQgLR*9aJxZ{Yn~o+%Jm62?n6_pcjiN~gY-f~uIL4V32cRdxhP+| z?2(<}dc|NN$}Jv+Ga#YD?}{l6mY%44+{eIU?*?Mdg@%zKAlH=y=(|m0_@MRGpE$om z@s1)uv7MJk6J-Z5!!<$}T>*@RdP=a?b?)g~O{tg5=+ahSU3 z4CtuV%R^Aij&0mAjEUHlXyC$LF zA~jAkS?_e4>5k+^CkH^i1MJothD83;w8AyM)#Bf|M(ZvA$n+rh*JA_*?Uqx`JW z2EvloCXW$A1Uk@>@wu}?P4vYT^3-SGYX1BM3+$Z+mN0^w$FrfyCE~iKIzw7$+lO{s z8a>a)e?ZqzP0;>Z>5&1kC9p!Y3(C`b6Zz7sFi>;<>M7)1AI+gT>`R3WN)7>$ zKrO5Pr*E7<3K$rY7k4*fv}0aq8?AaS_+MY8HDUo(?9`b5!Ow~V{S=D;Si@nY@H|p8 z$o>POZ#Xnde4jQ9yDC4p*DPGZA+(h;vXd&B0VowhmpX(B?(Jh_vKjo0%B&S5|E?Bn z9k`qxr8x_b9)=&k-SMuv6oY`tp+r~~C}fph9SD^Gt`;VLfhW=%+dr!bQ#c$L`!K-h znHc9-1Bq*6ch}p_&K+_^f4Mc=a>RxQNE+k?GZQ|oeQWA|W2-SzM+pRT$|b0cB?yVx z=oEW2)KQrQQZ}Ph82}ZO7vTq6h&m|JT>&s&=6O3-QFCJ+)q*8cFwE8lQZPN!iSwB7 zp&vz-Bx8}J#@=^K4`vN)RC5C&cFWwhSPSCoPwB=f{G#C5y8Z#wXUhGEGy^my)P-^o~&wV9%=cuGu^?c_O89=U%Ucd0dz|YM^E1O?$O$KI^}I)c1LvpLKjq&e?PS^3_QF{6JY78(y&Wd7@I?aJpT% z>UkYs;(p(#TVU-z{C?i0^=&%J-1)6$XRUcNfV$Ut_%zBeTE%5Z3Rd{Dx+i}1*8l?MOno> z9mGY5=R}E#rB;Wf^OSm+D%|jBg`=7Pefl}fKf}N_0WcGbC4M(?%#IALxveIwa~iO4=sr>r$OUdPHk3 
zCoVTQPNrAKQqtJ_*)h;9=UFuRQDpAHa!EdoJpA@Z`vUpTnFVqX+m!jIB9H!4k^i64 znu)QU(SMbjWfEoNHt5ku^M(panoty+)@v}i@CoLV6$#keuO`YzCVyL(NmyWV)nuWm z#RjcnSnT&)z<&HO*E8h09J0YPZ4|0PI4k3(^W%FPeZCtu!2((X7~E8oc>n{sMZSmT zBk8#(-vUvv<|+Es-#U)(SK~V6AzO?YTHcozrr_$0RX-EMxMMa@WGY&msj65Qp}zi< zvNG)xNB9yCD1 zaY@(^+kH&vZeDbuy;B93!?w0HfZmH$akcsUjUDamYbUg3bL~8D0)jorBk-FQ3r@dl z5XRZIb41;wrM_=$9_%TDilVkTLZM;Ku3CLj7ddr6K3VMx8D)5cvGxq0ZK+z%(M5A? zO~TT1U*-VpzWfT zL)7zL4n&+KF(3qvl2xyy`W=-&wSYeZNg_Y4Bq)@s$S71P24Vfk7cHyjwNlh*0Gj1x z+j(`}%$2+00>`zLVh%XioxsY$(-W4@VO-;8H}Cu!%0bJuEgws+fLMNZgL8dxo=&7L zXj~As*~Z&>y!gu0yR!=Ek3J#HN3d=+%D9{+O(X8PFHWv47!a~FDX^B3)87e zgnN8$kx`avPZZu#t!^N~twf~~;G{wsH7QfH>ws-0USG;S2vKRo1RI|?;uf@SlM$peDp}8M^Q=X>)gt)9bYDhx2fRnGg)FDI}n^ZMx znwd;Zj1f5;QJ92OJ)vBzE*(;tDAaW<*c4bvF4Em5(mYTTJla%b-c5{A5l zo-g(BSX}yBWhm;YP%ZxL?JwM2g8xT%(q^_=?5hDVD=;iZn#aCi520_KTDX z)onU8f0(ta6w5xc`L2_hml`k=&Sb7YX(RXT8Vz?(?;q5 z|Lb92+-0MVc#{>@8&JA>QT}d4v25aNVRy9%GdS*X?hkkIbLdAqXiI6_^j4d)3b1Aa zFu2(IDlqSImb1_4b4k`qGRe#yyVo1GD?Mn%tqposa1bs3er!W3So=Zw^Itm3extFA zQipsEIp|PqB0#OYa|PU~M5lBiLgKNym->bKmp#^1=;Bwxg=AXpIK-x zxL00?jX01!aRU{6^mhbLnEGlY$U8NSWYt4;C3w<;D?qkNup&x8MiE5`0|m^5nTDbgURC)EME!j6RgO~u7p30<7n_A*K$%F|sh=Ja`hvcx zxUWJ&U>*ihb)|~9Yb^OdI6;VbNGj40v&fSlJ%*o)Cgh)daET5>+jM3;>SDm((tE{l zTON2emrFG}CJEt#l3Ub(H07ZrdJl>Pi%e5(=BMxqQ3be5(>?k#QHZTC_znw*EeHZ( z)UlNW6KMa0)c8-mkN zVG~7YX!9M=_TfOT7xveVLd@Jc zfVZfhTao9B1z*R;E)W=Wn7P*iQ3Jdp|8)A&fGYcy< z2A8lUq7IUByDIfhaz9(wdz?WPyA}0IskvJI<`Jw^(-qOo95qlAB`Tz}SYSjJ3d)D9 zIVuOil;AczZ~9aCj0>g?ymi<*XpAyZZ&`~?*gV3zEmI=`=82750j{lLlLPIEo=+6k zb?clSN)x1Q5uY;P8Bi@up_Wi}NZcNpRy*L4DdM0R+0+MNpR3|Zk;TG8Q?`(xfU{SQ zkO_q?cou2U04z`gAFV2%xhs4Na=}}Lj^T9sh(=ck zDI`9eND9tGQa#g@? 
zWVk%O4!Dy8`Wg%oWL`p6&y@IxnQ`3gm?rvKRJP8FMfzhr5f=NEU{gp4OcNniSLB~U z!V7l`c>O!gmv{D>81%^HCW8jtH;tRRC(pWNp_(6O>Cl`>^RtF7+~i;vGdEf13>8($ z@@xs*$0q+`E5hA&bXy%BAsd8wDv_YoiG>AZyv>xZ6I3UJZ~@FpxR4?HWEy#Ch3^-~+R@@gWeibTY=cK^++(Ll0||AQq+1ArHa;OXL=jr1 zFUqz28)~|mz*eXE68NO3@bTnH4zQ`M!OnE>7g#fMujsQy$hkttrrRi=Ug_7W?B>R| zMM?!+GY?61*CTwRb*uBytWJl$lM;&mFvZs3xQ8;`TL_g?&?bxd@}%nyoUc5#_^LulA0yXJ6oCdomYrh`t% zCbfvU4nQt(XL(n{!(5dII3%=9L4#6kWMi(HX2}L5bX$T>_b7+NVaiulYT~hJw!{DW zA6rz15e6xPu=5$P`Ts~ba_z6qO0_P&pfUKTfmdLO$Jd5T_$tp$KK-&}gBAiy{?5lJ zz4r7NFT@^;QTp_+kf`z)|NQ`;-cGO&aSJ&V*|%!rLUH&*MIC zLPgw}{Ngk$;ZnXW7M?dHmY%m67KIRppj{HLQd=s1ptA|bCHM(vghQ9IX+{s>gY!=` zE+(nTW!2@C)M(FNHlih|3C<^#*hyMYMR;+HapW|KQe>`-1)x&L5v>6oofetRC8oA2 zIWn6A5$b98j30+oQ0ajXktQ)~E6uFDQ&7qUd z$pV1`YCvzXLj-og!qM_E%CMRpkSo*cBgqGPZdjN&2w+kO#Ai{dI9(tU zIYb67gjQbtge1+fa1vkVN!2K5c+j_r8L?OthmC?buJ0Wuo<~woyjyLy1jl9}Q)A6Y zy$5=xEOZQ~QQp{2)=5759;=r~AXeePP4Lc^e49o1`ipV6^E1+oY%E9l4wrm8$jQ^L zQzG?Yfw}aBu;(LmtckPm+=2Um=uY|f?9VG+OE%`*y;wajn|+@doa=wa5X8-|^Z=yj z6ucQ1ICHxIm|$kZ9OkM;R}f41m=9j}L`%=m3HPTUb$$$69sqF`h=wWx*2-RThc}QR zsy+}pG$>e1?^M3cJk(Jot9VG#4fBL(=IED#Yu+AL_;4fx-FGEpaloGC*%>)6OuIo5EFWKY~h>h&Fa5Hl+GGv?v|X; z`yekerNcsKm+oB`=;e=2uyBLG#6$i;D1s5{GarR$m=BnbKG1|LG zhG+H(PLFHcT=aRs`xb^S!|*^~A{M7`>9C=`1!e7%J^y%vFPgrVgpQ+s&sZd)#|xa5 zg|d3cwWf<_(qWd)Qwum`^79`hivFg47>|I?iKH!_aboivx^~|yY`+#-*XtSpPBu1tA8_2TOm9Ny58S>Vf|(x@sCv0aVQ4)rrq){Z0#1ygi%ypzfXZZ4ni&B0h}ug5!< z(Dzq!m~3k5vu+vBOV;VQp55yA)sJ39=S|w$&1id;Z$8h@=24FK^EB_d?zQ{*AmN~_ zaPnhvByZ(~@9PjIuc!X;qg(8&wC-+a*3Rc@c{MAd7Y~znFwf&{I>e}|(C#NqowP8y zr&9NI%4KJ()pNQQ=eIiVvDeueaRUm?r}gmqva_DSY4#l70tfx^b!WGNv-v)M)AMmJ zMK?Or+s&Ho6Y{JeH`58dv6lU-faCYv${8%DarFX ze;5f7!n5mqX&!mu^?lX9$MgI;s@uM9e9Ov#W(w{{%AA_Plzd+*99ZAR6{3DFO-1K_S*E`tzO`)Vv^oD`VQ{D z9(}5F3>iD3!xq^YIZ56OjxlcGSjXItu3{c7_P9VF*bSVTGGF=cOZgco-4u6!V%^Mfb+uvz(C>16k19Ql-E7rLu zMGolr>|^1DwdT%De>9E^3~!9qayyf=s0#26QGuf%RCxc@Eb86+M^VHxif}VpW!q*>l-(&-;!wV<>U7O}vDz0oU&zy1 z(RVdB?vmYsZu|cApBtKZf1!T}KXk+NA9fYi|NU+Gv!U@YcDDZC2yh?!#bb=L&xw|O 
zb>Wsjdqf13ta%YgYYzC{$vuz`(&)j|@*U617{iH3BrS&$jCw)3*ZEd$DX}$0Tb|Z3 zd#bKQc-2LZKah~tlo~&0Ey*=bTeXSRE%Td^3EQo!mg@@-6EJO0nd=B#o|}iy?VX1^ zO2wPweozsIu+BxDtPblUafMup$in4W#wCKasY+9WRztJ~j{g>RxuK{sIBG1?U;3LQ zPiJSUxUKmXS(~9u|3QW+(YBZ74NrSAkaKF3@5f4{c|t{JobW@^KnR{5L z$5ge!Q)dyXqsS1g$k%950U5c3ZI@RCL}sQj(Vh5gheNwE^y$rGyI@IjgKxUnKWMRGkZL3$$L-=v%zctaQ)0 zjIX|_Wm@X8tn`#s5r;xGs`#5<#bsq8GF}PRTz>9aQ;1sX(raTZ!R#ldKVVept3-ox zn0=l^fTmM1o+VGQKLyOPk?h23WZ|W3rx8XeM^<*`Q55^H{oL30Q}(gt*_{CGL$QGd zL2d*jGlgYo&ryJtZ;|O&ZUj4%rTM$ryZA2AFyJlFdZSWT3nQ$d28B9~WnV>Rk>00; zq4ZqU=MzaKPz~uYiKIPCC$m-U7IT=x|U^L^I;H>;a5j*}Tz=Uv1I`aG%{!^MS%U zeVb_wk2K6MB)+K~fu!#w zTRs}#p-n#57h=KcJUK|^z(kS>bHGmoB?UYUg$4hke?EEB&*d-zYPs%LC@W66hYiGa zlENffa&-s^<|7ai=Q>!A4RjT{E5$_Xax)MvqP$ci?BX((<0R!)Wd)RhDVY{I}6Cm>%dDROM^ z8|Fb)4NQG>el>WEiEu@{vL9bHJCFtos9B_VZA5Eux2zU13_`wZJj$DM-=gl%DCq2g z>q2HJR3E6^n@DQd4*Xme9KS^V3i1$aceoJUZx+Xk9hfJx?Dm}K{izDqY!13t24{E` zNrQv(GW08M1o8|d4rR=D*^h7zk>o9f!4>Un( z1C#VABV)m&z93fHFnx)XxhumOko&-Iscg){ynJP zTAc0d`@+<&1SjRiMY-mh;fdPE?KsBbt&q6Ng_v}?`smvi?uA)ApUnn7NlfP0Swa-}h&t!mP2GF^3kRb3c(bNVdo zAT$rNwkCp`>eYj)y#tjKhT{c;-TD9X2i^R@hX4=cKi@o|nKuiZ0xl3R4VrQ=f}GJ_ z!F8}1`lgKstIc^8u6j(rXoWXUxab8nuDEFYQ@r*R5!BQ%>s{EYvQ|RLvp16XjX(iE zGJIx#HDulC9S1I`Ck-BJ0>y7U`5$3_!n$&ag*V%q$ z{s#7|{M!ss50*!6=@toYhuxp2{x*-*x(TI(s$qA?Dk|5f`pyx+RP*wE5hsS%wOXF{ zzPbxH|y#0yEvEq4i`_y3uZu4xjg!%1Dj`}|jI_+5BVDq#b zR9rNZ6(@(c{sO7Yt>g_ev08(pBDtsdm!|&{OKy)voHt0*E=N z*$}#z<$YE;c)h^0=F;M9?D0N$T)`{c#^w3Yi(EH96rarn$#4*xt^5eq$(ef6RI~e~ zaqt|U9aoICaafCxCmKIoo0H9#(BZ>+qSQKD&A-JvNw5;;!hwK|CthP^*70NBb87GX zvA5%f2y#(xwahm%==`q1Qn`K3EhJ}mhs8=pOaJ>?96kJgF(XPWvS`JRcBk)wsZ^Z!YJnW)-X zYpJ31&c1N?x?Ucy&)O`ITG)MbrA)E=M(C`Al5P<@$;dB@Oh@|5u+Kir+#7!IyZ|g# zERak{hyvl1*U5@uZPws5`TF$NgW)lS=m|(=FvUOS3S|%s5d|xo=#UK8!DpIp)!x_P(b{l=lQ$p=#tGt9XzE4)p=NF%*CfSeeB(!n zOIOPSeYPK&x7byKNr;QRr$v~xjiz;@D<1MzoiJ56T8$fBHY-S*cS3P1nJs7zT4%FN zHf8guDS%(s%`|2_iL(b}SJ(W52daHVn-AJ_y1j_EnQ+p7mu#yF_4gahgrwWF*fg@M&_fr6`22aPA!q_pMi|)mI|};r 
z9W2+62o+&0D<6VOPcg_*dVnr)*){XG-!UCyEnNa#)hPJ5^%nLcWrom@AP4C#ymp~g zL>1+%TJo=?Qt*+pdNJuBeBtk^Om@?T4EWj5^ckb#StG;rf-e&uY*Vf>#KS8M!kJpuaeERCPi*>L{u4_H)478@k}y-oN`)!uio0eN3Te zZLiK6N^YjCF&ec-Z1a5F2zpf{v3)szfL4Opkd_;yk@IRHo*2bcTXhCU=W6wiJc$xN z*@O_{Uu#P~4MI=?_;dxufkf&=b_7Q%{&+axj=A?4|R3Z>^8!4o-(4#_<-DO>j!KqDRZ z43C|8l$Z#g#x1xd6$*vs7V2D(RvFuw1Pd{~&@tu2DI}yEQcsndC?$--YRou{xc*bt zhzaQP3rrP3?x>f!(G@}Ou*wKf7T^N7SMbG$3576?e|W{3q6=ffA&@z!F$Lo-{ds`< zf$cGPk}wA)Bn8r_@u`EbOCofmRe0l-&L5UVTSmFd#swYJr#uJ$-Dk*H z8Zd0a4VelY<&Oh|m57cYClw8wnu4zIGKV{Li#s)zn<+-DJ0w=Z2kWYA7#oxm?*Kwg z=F@-0d*=7c+E6Yh$?M$j&hN9^z2rW4?pMmur$+=2wGXjJ+;~^;=T0LD7kqnNfY19- z3^bpo{V7JO_j4HEs>k*E@cr%Ug1Y?IBzSMu2H=g4tb=YRJv00B((vl;g0K7jjErul zl_{b2N5FL%vyoA6RvT02`xSlzI*5t^95vWrY-BeUrFAj@p*ik zo#^pBuKGP*H`XbC&L<49eWUAj9N_|rZ%5AHw%dkoydNhPYqr=fT2*lyF8uOx(LH*S zKNdOSzW1NUL2kSUJDl#C-E6&C=iM*$^f=rej>C2ouK%d?hNW(X?)DJBcqv{d&2{0Q zAbI_aJVaD1D0hEW0Sjn1??wx9GhhK^P2J0=8dwRW18Lr6ZhP>_TzCH-?0(t{yQMDx zJUH9$H@K`U=%^XY>I$AN%3B7s9#HL3w15Co$)}m4OgvZ4R5uZz-<$OQgMULj!HdJm zU7?4E?r>Q>kcNfI##gMv*x2Z+S+d9({0tn<3f7CF;M6qD&DXOt8OzEfSss!(lsKO_ z#6R8#ByH|C*Tgi{N%R zG!&|CNQJOfZ)*?ogxk0=ZA7M}B#2T?U`Fj{(v+BS zho*>F)9#+~snDM^IvM8FG>f#jm&`f!=A210SIk@+Q^}1y=u%m+(v_3Wq0=q3;<~ET zso~=491$_?rj)4BV%Zt7%7#+178O-Im`q0VtZdHCB~z*P%tsI;1EtcT#6cOcUHa_S zHi0Xy8*Ngl1hzj&1%l+JM3O~buVV-jEk()~J{0Kw@fl)SC6K|0KM+1QGptOj8Lq!K z5G$gyk0Wym(lMgBlyYb)p1G-PjkBXJp==;0i$gx$ti^>78h+%tiwshk70;l?4STX~ zB21-0>N;oD*+fRAOmE1IsnUfc;oLC9L)5;684{gEXQ|Vsl{5*gTL5J$Y~qx>5|c)f zBsgd7IkD!=@qP6>yNHnuBt+*b|E+&0FgAZ&(hFi}`ltKNrGd+-FyV$f21<=>q1xlY z9RsDsGZJdm=}AaB8CSfaNc7PgzgqfUTGBC&?fTv6T{l=46GVY@c3FZ7VzbQ&(}n^u zSSFK&IZWO`w=^`93yp1ZD3DA!$8^gwBuNx=FY>iGk@mE!Ytt#T!283`_RVH!aOUO3 zc&l3m;I~Xeoi+mZ^eGkR5Dte>BfX@&)V%poiLuf%?=jixozwg2D+#L-#2B*Eq%-P4 zVF%vsgu8|}BX5fce)G%WMM;MQbu}G@a(3)vEmE(B)g}o7GV!6c zuj=(ljYgoa`mdvs*&{}?BpLE5V=Lp%$72pdr`$u+C3DO--Ob^Nry^~ zDRt`xfx4;T0;EMYIv7iXtu0MO>8oI`bTxNeX(E6 z1648DI_Ijm@z7NLPE+CO5QR?$tWpX->%lD#swat%l<8-aT7tk#$A5-uS}T`}<{T~r 
zkHaX2Aiyi9m9itduE#4EsRIW|%|{z9#YgI7)@CU-8>3=$FVeDvcN9Xx40Grb<(KCg z9x#oUV1;(_J5o#v4=P>4>(T>b?rQ2tHZ(~&qOk}20e%mYT>heG*O@?orwte^$u7u2 zm{TFr@B7;rs+{hlW!jB0fC^3@%}S6E6o06o+(} z0D{YL>o^tP6?~ZX$F4xg$aheH4)!W8^stC0-v0Q(3p-aZY=%_Y_E|_K_WTYjfcUbF znpVi&^G|=tSfmL(HyylkeX{~$(gTTuoNRzs7bz|QS1Ngf7&nMu1b*%`8fY3mT6{qR zX;q;6FYuB;_q+9eT;E$>SW>Hf6Dqag(DO3w_DOP$l?D*K$lW{=a*hVs$&eE1`a)pR zHVO|Yo?%?RP4fI~u3}uBdIyeTU=b7(Fwsd|#fm6L0Sc-|)kCUvYj7o)C8_r=d1^RR z6PMUQMDnSX$F&e^`m?9RbpnX&(F7Ar?y|r9AdB!os^o>*$#vw(Yyz|kYY@r^SPI74 zhC*Iz^a{9^pt0gC)@|f)Opf})_L6~E%SCPR40(ekL8Fp+)y=>*pn=qJppVX7(U|2_ zswfO~Gb4zP>VddhlOy5x=0x_t=Q7FT>hoDDNnidzcc@S7bN+EF$&nXF2?lONel~}C zjx1nqt0LAD+O3|o8XrW~!TtSYG@k14w?q@(&3~1>dBR!jVW) zqaL|U|FM$Ax{DkQDvt6;n5C9=h?ohIMI=d17TxXoC}m71A3iVh8?6f-_6}SEl$c$}5v3k@kP`^B|Rb z569=$k>qlvf}xR&;Zq@}$b3|495vbh6i7JdAeAG`agmEsJgL-i!9CCDE${JdbGG^$@3DhjK$ zP^2|S*->1f3wf6;d2vuxdyU>A*I_CE5Q>qe84=tP%3)Cbb|8t4atZB$3#Ba2GL1dLm zuIfJmOH-?Qfa>9_Q-l-bfq2r?>7G+os|%LYh^b3I7ET(3nE#V)gc2%wr9xf6ib~Mm z82-yc`S4DXDn!7NJh?d3Fk(QVoNxiq13S~9BBYQD^P$%F#j!T?`&~O{Nule4Tn)mb zhCFEwzVZ({58PC<>f16!2cw)4$n`N06z*M;-GBxUklH>6n;t*EtwIv=XZkXFYzX@?Uj(MVz|#HVcYCA@Ic}Zyi5K<<7#zh5}4zv@k`qRMq;UTf6qH z-lJQ%lR@)2Ew*ZI4B-xZsFLtN4Z&83auZh#I^zOMo<`=F6921Be zPRmn4X_R;8c(DXW9H=tWy6I|)X?YnVRBPOQOhAq{JN5&NWIggcMV>Rqt}NtOW~R-z zFYaWGsiPIiW@N!%yK!A*gdp1^UozGH2o*L*;AOE?ggSGFYiCNw5a!E^x|%br_)*i7 z5~0W@MOSjR%mqnU37B6V*q*Bd2zh8}rQEOvIwB+0p$rb_jlXq%oB8OItG>a6acug} zB3h2sr<+^#X9|65L#LS=c+N`(qncSo;9vPqF-QFVcIMv-hRabU*Mt*c9@UBlepk&Tn5YpBND-I- z3(wT1n6SZj?a!4*u@ahEC&qCCGH}AT?%|ZEDkJR2IZdiY+#Q@EnCCCzPgV~A;z#Gt zx?`?VN9Q*Hq5w7C>Iol*74DEslvL;Hb=ml+-GLx(6>KPq6v4+xaF)by{@|AvkZ;yi2ourpk!7^kR$au5~5Qt$*LRGGtTCBbGwl0WLG2kv?$t$~L31@|4hl5EYUunWxhWFHgpsK$3$guM0Fk?U)F zP2$zAuWeY~Mvd3%ePgnXonWVRom)|DYX6_2+@7eGHJh#6W>`$u*EJhwXw&9P(081_ z4r(MDgEpHmSZ*&H5WuxrThCg)Zb-b4$V3qwYw&^zuhouut893;DNWGvosG4Zh(|oW zElJw#I1?QXj$$O8NmLi14YrDOaK3M2b=RJ50zC_oY{E%TD#mUVTv2{u z+GyE~AdJ}H;96naXQB--nUTf$4vct#(|Cd()M27UASz|p>Jn@46|kcHsz 
zA3*iJkx%)%p5-biRVp=du`HoS=fre%zB;)o!?6B${I2<_4Iq?uAt3*t+(2rYg*vYh6w|MB>`vaLBl1b!qu4W?Y`Qe7miA=G8op(pFsJ*`=xaV5&!7%23t z%)%n{DF9*n9~-4y_V7+a2v588N?yUgysar9a>Moq!E-B&U(Z5SuI+eaa&ORj572t| z=HZa-f#Z8a43JT+!PT1JP1ynH)>h`wqY72vD(7+mTPU*I#0x!RkRrj!^#!WGjA$XCZjeWx?@~aX8qV?llL~# z6XyqArc`s?@Ya^LXyKPOr^n<54S2GLyJNF1c}JtTQBG^UeZb#=<`$KR?S`eA@mSU$ ze$92~YQvon2!EhEP>0QJT*{W^&>c59F|673s>~|!mTO)bdjn+4?bp{Gq$j=Ro+|Gw zZJM5koneQ0Rs`Nuz>zt90r!x73aFQ8ry8W(@*LTB&GCUazAtyxvafw2#{qa_d-EnC z_aha;ZdYgCKJIBaP2~zP>SGo^ECucVaex28ZM}Pe*%Y0BTjdy7ojAkUwCh)Sn8ahQg&J#2)nP+&s24~ub!WYZl>85mS3F=8B{EA$z^b)vt_O-JbrI-Trnwp-&;jWqdZ)dY$s<2 z-*2n+kva!zCon!A1JQ1@vu#{UZ}-7!h(}mAI-i?k4@lD(b}*W&^v`;x<7s-|XH2X0E|nPX+wN*|@4p_u_o*W<--pf?JlS3i zzpMQ=vsv4=qrBXAg(b8b8ytODZ<>=~6{}D{A)$}=KAx|SYK@nt&07C8iP(BdbF&zg z&bRv|P5XG;qspxe(Oh#!;qmR#d{~{VhvN{zLp@BHm0ky6bLXr4_$|--?+=T&hs$`$ zY~1NYJCCn3zk>AXRK;eaLO^U*h1=zM92$-8a_jE4XH?Ib=lu*o;w0PswS3Ok<+Tzq z*tq@L?srAyh4*~}r{{Z;asGg`v;C@7@|GaR>wRz*tFYZbmBKVh*MkFHwnFng9S~x- zG}1M=^6hcF^8GXwJc^o*v|Xdau}{XP$HD7)lb6HSIp=I?eXQ8$Zm>#MO5^!75!5@o zq3(N~UdL%B^|-RQ{Oxd7siWsxkoV~Qw%x0i?9=70jT$8_%+E5y^5wOA47YX4qp_gK@db_KAwk?_LPz35(-4n2MgwzZOUJq!l3aN@M> z9zy9)Z)WYd+CCNY(*W!bg56y5b{Yr)0@<(89NLC&@O{T6L?Ur9AybuQb>{ zd2;@o$*1C`1u_aY>7;sSHN$%8-TkJc*`>{MfBtpc-?x_qOJ5_jFt-bFwlY_72Vvah19xNfeqNzIi5A}Iqc zCiTsMT@=+60`#p#FtbAWl`A{=prmz3iwkHpS#A9A1=NIF87=a8Zr%i?NeOxGA9#OK zIljFw9(;~Uqq2_#|K7VJ4qp=-l&T1Y0q_LK2hJQ>^NV05gk=|{iVWGFEH!A@nF3i+ zEE%;`6M+v+g|LsE$TT_%4Yh%#YD}e#h3KVww91U76je1a$k2TT$=I4}GVlaB95u@K z?a{ezqauR|=?{}I|Os~mb270<-4(}~&5_b;S zfhMei;Ak-uYH9tUC`P`K$zNwf`m7QW+5U-5L3+pW$%3mi_O`O|ir6=(L$?5S67j0l zFn7sB^G(rC4#-{eSe!(9IE09@tpNvgyRc0a2%taFu89a0oL?@|)3^i5(|ZrEpqArQ z5b}fud3djNNMFL{uG4cvvkm7G(U+8wBqD3Dc{XwH)s~weSRnU zh#+DKNeM0I-j0e7M)EdbC|mzP*8t6zNI@)`J737K@ZYJUMfxx()NqXIxSS7l)RPLW z?2se?*z7+Bz8avJ2U_OpJEU-f5(JmWsrfwlh}eY)LJTn;_|nt9^{l^|i}X?NK#v1~ zM(Re^TJZake%0Afc}k1&J_nS0~;LGP>C#H~Iu$_2~v+jjmd< zwI=Fs1k^WX)F-_-{}^`!W^H8=lLU6}5Au^W8#2$&hHMc!dIB-784J^0$40A=k^HY5 
z2122c7ZJ<==#PPcvVuJzW1arqClH;`gX2Vp#Yn(spmZg))>k{W3w+SFWXV^Nja=4k zTgC!zAlhtmmloTNbT_w0H8|ISPp&L=tx`=`cJ7PFPUc^+1*=F`#*CSHf@O+Db zI+Bi(+HidE&-g~JorF+@08klh8WHJ%yXtpw)#)!I`+gg!J$ubpGY<*{Xz90s&cy_3 z=z!V!x*UAIBzD5|ag)Nqf}gYogev`YX&bYlo|S7GV=|;Oe28QM7k3+++oI2I4N8Q= zJ3_}B78VCh7M6ek1^ex21Rvc;8jecCfs^O-n@08m5Fd!1397YY##Rw0|0raGYzf{a zZZ17~b;h278+?X-3lsA#N)U!jAsJFHMQ{OnUWyiri6CUIk$m-XFzk+_vp-t<7 zTYydD!6CrTYU0EuZ`ZaJ0Hhw4(~D%CcC`c3GI~tLE{J40_bhSML+S^Tw0mhcaA~vP znh$xhi@KAb{dd_HeF_Y<<)o-3?y?KVHT$-i&EUh@uSkq|ltD!e(sVOc#vn9=+N&#Gcemy?ZUMI77y$^A_!*#O zXn9mQ+S$IZuXWsUW;<TDh=!uggw;TzYbP;y50JEZ<>HH~~gDY5+B%z2R@} z`{gn`48D)8jc?r88sxn87w?|?eh{LYtu;V1Td&hi6;J5xg1<;elwFR` zZ3G;mjTs`2o(^aGm3?)UFyd-)vGPCO6gClP9 zzojm1d1Vg00s6FAqS+z9V%Uwu4x10LhUmuoa1U>NuN^)OY-B`0e+=<)z<9>v<9vmY z$v;hHX#@8*xhdme4ynbiYApQ9=@zYKUof@nvA+q`2|Z) z+1jG>>Jn8G^$pcC6bwSM!_{!}GIeK*oKSgvPJj3BstpncKmJ$#1MRhL1ovlF5&rRo zqy69e+S=6QKU1_i$~NZ6YREnh^>}O6QO^G-hoN5u*udf z+qP}nwr$(CZL7;wUFfoH+qP}{*37wa=FAtd|H014$TxGXXXTJ=(F87mB%j7NZcR>i zZsmMstaWC}Dy67cY0Fp)X+`iSSrDMuP#YQBR5ksz6jssd3lc3mjSrv=RiYX)lBV{x zXBONDWOm+ZJK6F+VR+zbe+~n5xU`6Fy0&YIAp+P{j|sr9h2gC--NdvYh0e0PqqdP^!&f&@(5z`jFK+a1;;F!(a)B{tCB0aJ z<%JY?0F|AY&DxW}E~C(vi-4-nb=1%_fl+_PB%N>}vPeyg!zTsSq`?elyI&F&ragem z%4M?}XX*9z($5$*XY!*IM8+^69auqh>om;SQU7IJ-+ zU>O**Ito(@KU)26`W*Fdw}j?)mu%iK)QaT30@|gl(#kjt1vQ}RAfdHMjoS3 zIBiHqprp?=gaPbDvnUO7N|QFKJmg?pI|vQ4pNKqQIuNWFE4b!Y)tAp`WlImKAFB8Q z(zAW6Zp_7|boLaZ>z|YJvdUO%p07TqxE$#hTm`-Q(b7Dhx<&sn5#|mln1}D=(w{5cJMlNr+v_JvR)XN3(N24c(*tZo+mPpp6(NPvmP&{?7Xx% zK`t*liT~vl!&ebIv+Z-;?Q~0S=Vi3o1%UkId~^K0iPL)bc!4`X;3Ch@EH6p!_v5)2 zUj5?r!jADrPOsC!@>tMGXoKl-qC%S4T<(uDdLi>=D#AzKe$zYdikmMFZ{JRr-$|^6 z|JLgLQ$ujN*~<^}fjkiNBWWg=b1UvtO z;<5G3i9}E8%IPrnRKP|YFl0oslZ)CIyqelj6TSAm;vJjj#ntsx$eE)d#Da}>lA2UC z!wMQ56g~1CGp#bqghViyc)UN@`ULTmRN|QDrx_V0D|etXg#YDC-U+&xh-XHz<$PR{a48 z9R-B;rMrmiAn1BQTW<@bvd00d20=+eKzJPlJ0&}<#NW@l-n!U(UZTlzz2WiH5TtfA zJ388%u)uLcX7K~gbVhBdD?J6NkKXOFc@GP4MR@r0u>*I075&j8O=;+>Y3XjQTh@O) zF~AJzfYuMfHz)e;1k7@0v8@UWEaDR 
z0;Ihg@i=e@eZYD&MT47QTcHx_s4UeH+3q3gsOHy*#<0pdMgCDWNNQ~fubS);7$!ZJ zM7iHfnA%^&OxDLsEUQdpXIn{(Jf%ZkORX!1#D}7}2&^&Kbqm|&AQ;CI_{Nqs93g+D zOd&b$R^3A?svw~MfxkxnEH9=6MfOi+mlZqC(8GzST}zoQplSncRVbvAm^#Z(V=6Pt zP2M={97MS?7yW_#89bISr29MXsI~|_RB2eBLWey$*i)}un921GO*T%Ikp|7+KG|Du zHjflBt8}J;S?VPbJr06fcSe!+WZhW2K}|%fS5)NHDkQCe$4J;!wV;T>EhpW)E7k`% zR|k_qw~rAa|4##t{VW3kANUTE3-BwMFsJ^EC5>ET385;v(khB$=%37>rP>zaoo7OZ zkVdW{)|#4otqlYaOO3#v-54&x_V4_}8Uj!WV3Oa6y=t&T5`4K=A_I~! zalpUQpDY0S<9|EEeZOP{z!*{&Ye?EOm;;2E#3#M!_KrYalgR6eW!cfP=mGKYx>m|o zE`W0Iv!1X=Qa`ahcRcFvrH?xBZ)*1Kr+*^9tNFf&lE2wM&R6ng{OiB=^5Rilp1HSx zlm(61?+6>olQjOvOO4md(y_mSmQUP!?c2S#t~eNeAZi(mMew{^c;zrL0y)6IAA za4PLzWhuT_Zsa4sZDvt-+(;r~f-Qb{%S5~6MxgZ3VKej5x$RcPDD67B4Qe;UG*|3B zc*D6*_Xl>!6`XO3E3a&Aw#%>VY_@ER_Cy<$7}2R$tdEmCAKvF1$lmVWi`IvqyTEN( z7M+ZZJ==`5nmuP!T=T_A0OZ&11fxuu+>ShShE9^cs>fR~=BFZ%8dnQhV0M8=@H3+Fn{BRS_IEQErP2 zh>1F`={q-_cvR0}YEM&sps3NntNqu$tibl@q{vF!02p)D^SN-C)SmRa93d&$F{x3Splue{xc^g1-g$00M4Wx*5bA~Z?5NtlhG1DcJyy{Ph zfLelimOCi7h*ox}XABZd-k?M;V@N0jF%?FrgGg`i=<&o#Sn~0>$FyY;(Z|6 z)fKO;mEpU`r9UTjk;o)jM|JjiP*OlsV9Uf=utLjj4A2b>#j?8$PFd_4E1f9x0_dGa znC~iAxxN6s6ukl;ou4H9Q}EGyDfWg?qyyErwHWemTuKEdJTU}B2v8!IZ0e41?v8|q z(IAYq5O*2!MCejv2@IhH-Msq$;&Wg@&;k3*PNf3~a|mMtt7SG!5GbTu(ZB?Umg=C0K)EK#n%O)ydMCBuD$y1$6^7Cf1=>;S86LameAKbz+nloIr&H zXr!l`1Cyj70uZZRR5`DY-RgjQfE2{)C89#8=EAxGFkhA#TE}}WgVm;wwVIjVe{M79C-m)HNiuZ$}K~Z zfS|~g$!%xVva<+zZ7vQ-7U7BDh@>z?d^Gk+44Zl-6;wjBudxpcSvqh^=^$_U1bRE0 zbl9iF_E`U}UDq&O{4-xp-c3U^Ln?1XKP3&+o195z4c4`FX}Drt;m9sA@J7Nr!xd31 zN~??YDUVGVzm-H6%z@trybEd-&`1C~X+Ru=2TEL|Ngxx451a%m5Y!Sl=BNWPuQn!c zg+9M5G)5QbkMsTI4~}d@)-Tq%2hzBA`$ytcHMp;w@ygL`ik4wO9PAF_a^FcVo>8A0 z^(dO_YmPL`)U7;X%`CaPR#hGNbQBpU0FL-WnCRjjGt4D_NX}eg%nzu_WuKm=1Yi9d za}eD)z9pZbB&XYZRk{{lk@T78jOX-dyg2$EVJ}0=ypiK*H~y(3?{0E?(2T6%X?vVc z-pTgb#os{VYvpsg8&y0UH_$i|8)0QUrcXG*K9ZaCIRDb^*uuc zk5JhLJ~;1iybhjjuXH~EzkGn-|5)-TY5CtB?Dnw9;n*wH@Syp4n_s~Dzpwe4<4xOK zPq_Dde!%-b@5g@ldA?0Lhi1l^URExwfNRZk&wK1AdULp>_ 
z9SB7}@#FP;yA*f-DL=cKIPFf~=Ea!uy?w#v4q;A*v&W~-wR`{kz_p*^_Gdr3$#tG{ zFHyDrnd+MPxyp+T|DJR$_WO80vli#i*(Uw`=#T5!ez>gr&YN8C+I?*Bf4fZ1!!Ip^ z^$MFb#|r5%<$GdotFk*;=HOZj^fTAPB}hWv0IQ$8BfOa-8rg=Kzdk+ug5=^Fo8acC ze9+n8%riCzqnRM!>=EB~^V&@PJFBPMfX|Y9ek1*J9t*#%snqT})a#zN{d5o+S%63j zZXVmrFZ|iHRM8{}lS?zSmR{MYNttSL0N_Z_qS-oyP9cV=%xYAy1azd@BvY59TCXB3 z8cf~#k8;fs#C(nB2z!&S-cZ+TV|3Ug&?-u@t4pbpJvU8F!uJl9mE3J)*22qV;r1A4 zDMOvs@n~F65-*v|M$Q{`07MCNJm*wz4VRd0SV?-k)f}dKaywO*iM-_2MIQmL@f8P>BoMdZ3zGDS%b^E4BP~BL@nH-6 zSFxPg;44YT_fgs5JRi|yR4c1k5otq}@AetkXlk0gPB_?JUWT=P3ZBzk7dJ1Nti>|S z(xg0*Kvgx*b>%seK1iK~cD9%6{p&RL&O3SOI!CUK!gNt-UUU8zS=o>kwB6e`hfQ=WCEEc-FUQg1gwLCu<6Z$?#FGQr8s2g%7A zWq2y4OS&-qFOj+3uFA`Oo4#6BBMl{k)@5}C`#lU zprTxJP3fsnd}$#o>rUoeOt9I zojWfTwxL;Pk}}y!ajb5F@Xurw8u`<@)SoQA&|ZPvLd|8G`Czu5eg$Lp@xxTlg?ZRe z-MMI+DHD^0>M?l*w-tt7EbH~zlUu0y#Hk8wPG!J3GT5#JGRPRsX^pV#rq1Ln8%?RX z8k5C+rf#yLQP2h3{`_;Q0+w?;)?k z95xoPS&fe$ooWWy;6W7C2Eb|35-NXagDnG^SlvFgK(x<71P5sf8b=ACPITpcq;qj; z*m)0iGRbJ98Bh<(m`xz;P3Mp_61=%^8-iV3j(GTGfZyIg5|3tqqh$ z;w4c%frP-d2rDr39R05}1%=b&8$IEBA@wOV)T1gtfF!-$Jalwm*ii3--LdjJ7zr4G z-)MCF^Ok7GHId=p%Z|;+oV+lfr^hRT<5TYtj9eHnGctfhSfDA86^Wn_BsSj+ir(d5 z5F1{LUMHf71(=rpk>#3m1Trpz!e9~RPk%_jT4^O9lij}x{jqCL3ax*PS`LIgL?HfN zmm)AEgrV1-qDr9>3RIiIc^yLWh&=pur6z8nP4UYe+@@vJ6($4pHqT zs-^D1Xbe_K&caet25D=OkSOT8w?^0sbqy+%8e(UiAwVf5>`1uT`%1V4c8>Mjh_OTp zJn%PA)lEM|+5Lp~kv=J!ILbk;IgkM;<_U!}g)4nJWssss?d1NM1Z2>4c~*(^%2b2q+F zNdM9y-Fr#g1RQ}uwDdXQ^SDb0x&Sa=Sds}55!}noLx6yq0Z%mw*Ww4^cG;l3G2)42 zZ$_9*pD%Q-o0E2~qJd!`bo$F_fwn-#(6segjJ$C9(PqF; zigp(mSZBN5`@(WK-cNIS;MwFH)PQg78jRvi#mO^ZGRa+w9@*~&i9GBvX@8ow@00+rhldTbql?Ykw@mT zk(Zaw^j*97+_wPJoF2aAl-ho%L{9=aoyF(zybSJtcTo(J%gg=b=2G9YcahsQZ+o_X zt4{tL6xq&bzUV zh3k7e%Cb0KY3H)|4lR7$=(i~-TelfIUhT*qe~3ZB^M0)t=emJj3Si^fNmjE|)*o7iy{Tt1&&CvL9UPdPDD@+0=SpkKaD;zWCTWTFLFfR3pDCtiObFEgBGdSs6r{%I_pHr#BZi8Md@#yNN>-e3CHJO8R4`ecH7AgsisnX~Dg;BBc5#UHs6UE#OO)>Iol4MiaeW(=aB4;3EC>{Bw3!a|2y1FQpJ4VEJXgUD9RTv~!sVr`0W6@Wu5 zX1MhsKvpIc`jV<7=Mtt!7e-5kG9CkPU*Y_9$iBwLSTkkJJ}Ri&&IB%#+X*v*C5BZ~ 
zGupgLsbQ@`w;V!0hE&4PO94W25~xB?26KtfFG^%12ni0kk_8tYJ~J4Z1rESop6Oa? zmfVRoQz?Z0U?>S~L?}Sq>_8~#=cPVN#-{=UuDzR;bL=3kS@cF?g_=}?38*%g@nQ>h zEF&~$OJlQI5^@5gvJAj+&OU2pt)3*@DJZ=v909#lKK9qAG{D0pVe&y_UBckIAk`74 z*?(?i1%MG)LCn>=Hmdrib)cTXm0Xyr!U{APW_`w{H2_`(1lcs4N1)7k<1qRYg`iur zf}*Et3{YVjAW=~VR09A~*U%-_*UM2@MP_y zx~{nox&NT<%wb02IZD0P8hHmkOXU@^2tK{L^aMn?D)LoH?2Z8DgA_7FPN%(Q7me%w zy;Yu?fx->Kvhq~2^;J^k$=WV=5-O#76HLRq=lrX8RX9tgQ#rTAeB-E|cXR{fi4eea zR6NWNK-KA2+6eC|F`jDCU(lcTK+#?ewcuofRh}ZYp9hH^D3I_5eF6GS%K%C9QL9rZ znk#U;<&2W+LCrearSHD-uid-)a?y!)k-Tvpi~5=`q5~k%K}s1be;q^%I8AMV>P3d? zp;&6Zt!#!+M08YCrmq(H7n(~X4TU#~u#rK=k=jwLSFNpRd=1Lzui`qY7S+dk6o{q; zyZ-Nr2Hc{FXI0Mpeh?kN^;#=;a`1gYK%JE&b}<0o1TCXliYHT-QnGo=uhE+`2N*{U zH$oB7Wi^X1vl2j1o%Zx!&ax^s{Jja>c)EZe?RXw^A^Ow(VY}HssN8bBn$ss&2qC5T z*Cz!H^$r)$3c3urGaRwCkX<20zM>JAOwer;(JqZ5KbUlUWa$&Z(c=dbJh7ocsH@aI z--T)=k*M#8?Z4;R1=K=bN)*BSdHZ8NfADzL%sq{sLPgTqgD>F()qof2J3ecbxfc~c z+z8b{+!Es?^obnYZHrU7{JCai>>DXk4iUUo|D zhEl;zr>jvxP19)x2psRJh!yZjTLjP{dvrJuioy$MJGXn2mOw~to9UCC4+5AOiqndmw#PgSc~A#tJi#_;DUaoxFV$rC zLe}A;#M7fdQ3XLsmEpR{49k4jQytbX>eQ#>J71@ImX)UskI~>E3t-6ku>7)~tYoXQs+1i@ zJEZs=nYB|*u;cG9xL6f{JK!;6huhrY(X2_oW)o-#nuHj=6uy_Jr2jX5$}z|_oE&So z9=oxb*1Lw5zXp@%o9e*Z;hlfsWeAoS`dm>AUdW0(46pc?au$(z zsqC*9LZRO*NP^8)kqO^&dB+VDy9=|@j8N9vvPowAKwsBV{fk7V$Y%=#1bsiy-l)+d zP^n7HWF({_rbBp7~eK??+#2OnN+E>>cc$FWWI&FPD0SZAbmu3cT-F zB%V|m#KZoTu)j(wi~?y?3w!D&6lz-;zflOkfTku&l5N3DW^s zR}p$0a=b~|D=cF4b=DA0#phG(4A_n>LK9>`msHgf{&WtoWWuQDU?T#}cvwreWv3F@ zd;^H;-CNRyCp+-0B>G{tZOLEm1lSHwXTH^?-h8{<0ao`FG2{&!qyz5GUSbR>4Z>!J zdn{>N3e0l`4%69Q;>d$pTa7=FTXtD{wQBwq8_fDapcL$dnnI>Io(y6{03wN^9lBtE zDGwt62jopkp!wW%z!Rp%q6J8#eYz~5f+?!!lQnV0bu&Q*qZn}C1 zcx&KUo}p5nF~@)_$Y@Mp2GLHlB{V`nZ=8TG8r~^FK=11yEJOh8!hi6GiBUrcySY~S4mNDZe}f17;n47C4aEz> zz^OM108e&E2n;5kdOS|b-qEm{OH_(v4?iPe!UG+qQF&E?=7mCv7pixG4GF6vuOFvF zb&#Wm4OH9jKu3hFRa@8h?|tTq#T!}k+4LgY26}kIkhkk&k9ZOd136saGUrFW%ZZXt zFb>eOb0=>Ugg64WRZ%dd*V!R?HJ4|`=bMVS2S8bO%$_ChMMNfYs1MBr5SI%e#*qWQ 
zY2unHbr{(rCblVX86ijK~|geK4eyfxD+Oxi!*|#(?d2h3RPw!L!vEwm^VOIfZ4y zxYF-CMO>Epjh3|#g4qGFR)yT`w-MV|m-0|&=9zCyA8pH>bLl-dVOd}pCV;B=<0~)z z-bDIbV|)vyfsF~f&(6p>>Mbnk)130^Y7E&~MS{MOiIdV6{YwA@ld_4^2bSN+JFH#~ zP)v%RL-MEu+3w%fk(5E`E(e)rZdEYiKdy*)Jq4^3rkgZsik~CHjQIgGoHFn^x?IrO z8iKi(b8@@Nig_V9I2g}~_l3aMJYGQIc2D8$LO14`eh$y%nZCQd|JXCmxN?q(KiIAo z@D9(2wR}Ev_DFXj~pRV%u-k$Ygc@??ggZDk0*7m;(JAmDC9T9H% z3)V7e!}~NI>)Ne8!{1{#qt9ZiwQ7cwsrxe$fBon4I_(1A(`&?e=Idpu=Ii{yw1pR? z_3B`!=UeAu=<23~{SXfSn%m#wtfs~PV+fhmo5X(Lx&!Y6dnBEC$D3*PZvEk&-*3)S zMw^FIn#b9)>!(@N_2#?(sqFO>xaFL;MaBO6-;TTajQck&$BFIH0YBTP;WoD|deegN z{Sdw0*T&n6s;`&f)7vL*H+gaIi?F*bos+E3!g9F2-(KAEN8I+u<6=R>MIrVCeoW%^!$8co~7Qkwt!cdq~2p4N$CwLgWI@(_FTr`ua+d|umLB5?)0UE506 z)8(r_30_Ri`|S z#+t{%hkq$wv-=YJNlef4T3VLSFn8x|^549?*A;xdPn(LL>(VB6_9ouDhwE^9Al(J& zBY8gG3vPZdpQoR=C$r+6@2=|#{BalNla#)>-~{cFFo!LT%Lx>%kz z*O$u&=8kw-7oNK=b-owBr)ldM`j_IA_s^N!WbYeG@6%G_ZOl@-?zZoz#IS~It_Pk9@AUYk!(%|*|*=H(3~_nYeN zMycFNm!J1j!ASb_uHMo0Pi*P8=+#Zn*qIlw6MgUY9?=`@qrq-st)yeOKP% z=kAp5`>+j=RXTs)+Cx*LzmzxGz^!=oStiRxGH|UDOgt}d|L!Mim&!Unq*$`i*Hps; zugr~}Mh7OI`Knyl$Xavop5wQzS=BbUHE(cTO9#d#Fm=C8MBjW{g|jPc@Lgx>T%{qy z_=xB7=&c8T2&NxrrB_&u@yFR)XGDWJ;S|#9N+3s*ThbjuaOH&Jst2}(8o@en8fdU`1U6WI)Xyp z>DQNGoc~L1i9dlS67+1gAX9#0lw$!?#vHsYXS91K4<&JbIw?Q|oKBLQ-ts(XkJSa% zUa^geb7WStf`wfl51S2GE^7+GDvca%me}cSpW5Oz)){gbS5d37n%xgxFEd5{7&4Bb zhOW_QG9uj_9NfBPou^+^B#G;Sx*vFd?_#lsDtS;~);kH|h$IP5l@g>YFYw@#a zh4TG6=|C0Z<6URe=zTx>{twwpyWlM5ec}O)$(0+jd zWh5_|O?M^DZ3;bNHwZ*)!A_Y~&T@i`$`*GW3Y?Bmc5_+QNIj0(7Cj_n7&DSvNGO6D zoFpzkQoc78n!Ov$EzMH5SbzKB=gXE~)BfkQ6*hn4t83d(z7_+ZTk;$mN1o@JPLt>t zt!_do#TT7U(r<>RZY|2+kGD1YUC|ynQ=1p1z3n6g%tML-zEr{OBUWmnmTR~tHu+LG zo|+AXM5@KKqM;F~nJ|tGnZpG9Fj~~mUSBf`jApR*qAdwr-JMuWMp+oP+-XfXah4Gl znpnm9f##HbVDqG6El2ZM50y4>lWOCUalSb>nY6UpHH)u%s-_SA&==|%nKu1*8w9z_ zudv8n>w!H#UQQXGmF7{k8tb&Q=#Nyb4eqYUZJm`72EFQL8VZSZQoRV5fv!@bMH2V< z6sZO2%Ja0{Rn^gxRj#m~7$N@wml+O&e|Asky76KA9CFZv$B%3=l`}2C7fZof5Zms+ z%XP(f4Sl%3zt8u}{#WV4kP$<%vL-W=zu=p+4JAZTBvK53RfhBM7D=ped>(ahpAsSh 
zo)DlsgenvfVG6`k5lb)NCQPqRdJ{pw(|5yOpRqd+&n9%icHM?;$HKPL6|T3t^~rkU zvya0*ocM=0p^BM^#+Kd2GuboI#Tm+j2pg)9qjy8cnxd9q0>(@%q`+BxsV8gAX=<0% zvO&Mn^#Oub6=$>~^`MKO7UYZe+ToT-g>eVvi5=kV9}*$nPyhd|taNAu;-UYoGnU^v z`yX?agOln1F0&cC1p$O%^Q%G2tPqX3!L=ah2r9>UHC1XjIU#DICwMFD>ft)}vEgUC zMm=>=odkte!aIm;&L_|Wg&LfM)_bPQ_G&DeFP?@SpANj5p0MW}xTgf~NA?${`_i-Q`=uU`WS_%6vjf zV!5WgjY&sWnxXNb#d5D$wRdjKw#D+G*v+{?$EW2`V3o{C_42>f<8LW`jk9Ng@a1UL z3pCm-?X4iR_g5`(ebx*3eEaIZd0A}jiPXqnE>CGlHsau|2%k!33_T8mh-y z5bvr|51_1p*WI72--v!=xj%k9c>b^58ZhE6RMtnE`bytW+ED!#|3HXCz>c#W>8Fwg zPi|8C3^E}g@W2eJC$up7j}3UZ{El?v4%WbHF?&Z8|wv@Fb6F~l}z>aEa z-e7w!)xW&94aS-9b|KbtIHeuP`&|UNDB-!@4tY(il{eCiTN3;?^NNLZ{}Tu4BzA(5 z_ltvE{KY}y{ZA)z`W5gQxtf_d|L^6r?SJ|B%>T>B$LSvFdu9FG*(~REj9!=>eWrDi z>_Da=tJ9mTE9guug1F3gE?$!!a@^hRxC05WjH(jYRL{enM=*|rBV>)JER+QyEXer= zSycZT{lKK8N-De>S1NFkWq{0B>vMtEK63NFcz5TW@P5GIaHffteLSrRefM?VEF$h# z6NQky>uN1;DGj*#lx|k4btw&%v&%%c3lTu5wrZGZ@|bFgiMq!oH1vPiC5KhCAI&ki z>W*C5h1X@1!kiXX|J{%_>^bHt?MID1ce57_FQJTfn>N=nJ8Or zPLXT2pQNA18LG)zIpD{F`9z+lYK^+I|BKUJ`Le4i+@O8Iv+|ZrPlH(?y{g94wt6o) z3Oy$$25TzYaXZN@sgQM1+z5;#PAG>wF41*mqn}ede)8tr!csM3vQz}dp4R}d%Dq0BsNkcrmm&*dDH;MAlS&nt= z`4}a2=bK?SHxBy3QEJJ+dNX} zBo}P`HJf>=XetnNkwuRa?b`)dQSaHg{)=l?C1P1ykmq1*o|9KVixh+6AVH6cW>VP# z*x*>T1J?F5&X?O5Znp)p0Y{a)j70+FS}>t-Z(=SMzyji2xmHW|Od#Ha=CO07ehy@B zq_)dUYu@8YOaA!F_of$swX~>-xu=;qIW-PdGc}f#J?kJPeQaRV^^XOFIx)#Ck}Th623_#iW~u?CHQ*>nk75%&txsn*VVBDVQ8|}T@cQJF=T|F`yEiAC5#2GNZx> zT{AubD&uryM<5RP3-7|b9)+AGDtUu2_5uOp%y9`RvN@!oc{)XLZTEB+rzJa0CMixZ zfn@{_AMIbqlv+4Vkhg0=gBgKRdoJ==#t}$|Y=Du;#^U`lDI*_m4$9olJ~$1iheNn% zAkk6CXOtvSCxO)Haw2^h6cdP2Aq-Y>Sp&=g;O~SRx>|6*?rnn|f1pu-l6^p&eC%0d z2Sfpu-8FkrngmRI@DPBmfNlY^&onqcf?EBSy`WZMT*qecXD9=b1a@FCZXa-<+&3~{ zKG0Rf+YW#ZZlk!bbR4Q|yQe?U@0-}C?x0LiMv{U&jIVZk31`eZ?7e?~9-W!xr*H;6 z??V!%{9=2Uv|gQPlGMe|<8?eApvV78Z?A7y`)77O-nJai_~O!^QGUKRZBCk}>viYyJY0UGf8*tS&K(_3)O6pjbGHm1Mj+3|e+sPUu!x$ODjeEnXisdD{# zpPo91=QI8KG}xKZYsam#!|{X0;yzSC3;F?ItBEvV!xZ337y+*Jyac?2dEEohseS2- 
z+p>rDaO-{t&dkZH3Y>JyhA?4!k?q4H^4xJ>Q=Z5*e-2u>l^uD;m@_3Pf4Ww@&w6UV<{O4-&n|)YpQMf` zHC-@+F|Q`*You0N$Ok*OTL-JkVq>+!a0v_aGK-(Ve;2Vd{O;?ATIVS@YdAM~&P{5U z-}0a7b|t<`c7{(qhR5fq4z)P;}I65QJyTpT1h@Ytzb&{D@F z+-vcYNPn!|t`gS<^^#ktnnh(vE$4L2(kytc&%E1|DtV&2D|r0T8zbB8vcfFzS$kL$ z_)q;78u+}Z%Ktw+{SWEb$<*fmF{bM($yy@|V)&6aj5W&&hdu5}YKRDm{E_8{_?s?k ztwm%@^@YG=%a$q0v{u8^p}jyv^lJwllSM$GSCZfFx!u?O1jQS4?h9t#phX4G&3W%_Ggi0SEM+Z10^T7w*PY zI$&>c|Ne2gaJ)8N_3)6ITyrw=IW;p<8~q5`tG2q)^SB;iOjxr)&&%qYQ4|ngNI(i0 z$ZtNAmV8&^p6BO}qr;E6gA==~EvSFo*r=!cpA;?G)^Hy4ruC&$G|nnBu+V5iQCwGz41|XRRVnk+MvdaZWZ0! zW>ZZ=r;jGftF_aheHDv~9tDht{p2;}(tb?wvuM#Gxa}iIDg)LZv3yk!rfzCnPskux_fuy1X&lSS4jZ#3{@$fzr>2%s{doeF7L28`Y9*i!HyKKh&NLAnqeM-fGtrV$ zN&BS=D?^kCsm;$_rby$m8z@hs$=F(Se(m!sZDkL}Y4=1hY?ri+E z5l2209<_ehgO2de!k7j}qRKxha_R8Gy6i%uf27z`qa0|o7BZ?_=&~UPZI>oa;}Y&u zz35SvP4x`Q5EVl{T4KGK^J76;45f#eVNf58)Kq&HKX(rTV-mvgF7Jv{-!hEco;p&;TgGbu{>7s?;tF z7lAEGqzqTes6^Lb1V=|To1`es(D!>npiWnzJUQc1b>xI8M-3``MqtjN#&nvYcvOyL zI5Kl4F>&B%sI3H}&4@66by2B0WHMju7p<%ycYlz@sAkNS_N)VYGcBY_WI`(g*=3JY zT6Hoh%qDb(pM6g!03G75GtRvoIpT)>5N&`zT=U_m&Y>qB@CtK&oJtC8bnJpj37-Cj(F@ z6c9s4QA%+zQ99Znp>I4S0&*eFo?62Qc~GKZpFz%|WhP}!qzK=SGUX(eF#ph-tXrh3 zISaScO`Rh5`|)5^)_kQ$M(%f(782uHeAm~Ow)QR!J{4wRlwI-OWelYJ9|R!tvD6qA zD-$%zvm``ph(~QrrW4srY?-0dA%pb0#S%gQmIdzL zj6fm>!(x!^@fn_xS|rvl9qgMlH!AIllJ!Tp*s;3+7ugj{41{7h$u6aOCXq%!uqp_N zxV4zf$pyto7){)uNGJ$sBtl|ooZe_7a#$BX6^!DwF#zHKXbsAkq2-WL1i~0DErnsp z87?)U2NLiE_|IPvp|~OiNd$vxQ4Ck1%?NwW(<#dSgv3f%9%wR!Sz#plf~qV+UuvTX z5{z1)nRRsrya!Z!_Cg~7bN@pWK?_bnD-S>vLY!3A1&Xf9F9xh;MH3@XOC;dl!|JO! 
z(9;7kZBAkWw?}RhvqDkEbDYAOUqd3W*@)86fzHwrP}YTn-y=zMpdF>jal3_$NG@|L z($y|pzYz3R=3FF8v_3mYuW!ZkurSll}*~sPJJhZL055iyKp= z<1h<3tQHhJNCAY$U3Ffj*}8jNus$O7yp9%q+?adn#vzP&21JbxI(-gQ0kC4#+q6Db zBdWEvZn*#eaHdJ$0|rp3NfE}XpqedAEz}w2Fhy!tm98K9Po_o#@n6F05{+BHFdTXG ziYhXBUB^_vw%#_d7tc!J@geWNSmMiKGR6+$uzP*0a6&d9H>g`65HZa)iQTEu`lTz# zoY}LG_CRRHulsv_#m$y1BWLyesH2*F7XY|UG<7D3InfV~!L<(z%eXGtVQbb;0~?`F z-2gwjzl?zA^4`mG+z^`wkprPq1C(Kj<2sOmvMV&A1_b=M(ag0 zc9{*yry>Nju{A=s*vl|8t8`uSl5A8ox))Y-S8(uXx!IETF{tY42(KEQGd^73KAb>M z!pahUAfTa5pfvJU7@=PeVSXGRnkevW$P+PaZfO5W3*Wc`&kCM)7B-=oj{@SR-=NP` z&yN@bmx7%OU?35{*7?t_2W+F1f4S5SN_I8eDUB>{QG;tA2NRYe|f2|0m(Z; zuXBoi?9sq{{KzkFp+U*;L+rzX0S>~S4CuSTGAyfpPG;sGt zCTLtstRT7qdB%j=`3Qrq-vJZy)-{2Rk+TEd(tUO=x8AcE$H0LbB}3$J)!=z8R)9tD z^c1k?<+*#b(7ir>mUf6bAsfT-4jXT_08Zf1;xumu3Sw+TaEU;+w)=(fsz_Mm&X5r^ zPE5tXb%94_^Z;kTR1@!br=KBZylOT43SmAHB#pF_ZSKUjuVtjsex=ws;ygWUKv90mW(YsA!h{2>LhLzRYoV4(q zdi|V6hAYFiQ_0}v<$+TDuMeF^QE#l{=Q(6X{+c$Yelqu~gR#dwJ6j@w^T{~z2mDAB z7T zk-YM(GyWSAD@YLzIzi!50}19Wj0`mzTqF%^-*#J$ps81O_O>x>I$o|14lC^VSQoYv zG}2kLNuDGT)>l&m-Uq2hUAsOtC7L+p1_slHt@oD4-dpdK9Slkr=_x4geZpr`4sa~& z(<{*mulf+soNL&M{C5+`>497^$Q7nFdnmVdQ2Jtek%iYy<+tfvJ!2<}8*#z5uZPh&{DW*7}(AyiL-=9v=y3(X&g=NlZR!>EvdvD3bsh=rcO)lbExmwr(Kiofj7+ z;|v$HQDfaq1i$T|aEo6}HrsP>9zJWQ7kV=NY1(5YKg;Ky3SrFPk=Z?Gb~_^T@i^+V z+{>zWA1ZfqzPC6X-0q0IGT#KKvAWz_=x;c+cxoDb_p`^6W_VK1-kK>?H@rsl5OeN2 z6S}zHtHh?gU%OK2ee68!pL5xKzH(~5_qI+egzW5|zAX03uxD!_`KGhwz=U#5yl62z8#A<}bT+Db_?nL+J3gz8;yy-3o#+XCSYzL3xV>NNS^(^u zckY|+?_r3%Tg-P4>0U|z&ubj3m94kACwP_E+ukQi8bS_?}+Z{askLNx{i_JFEmgiYhRE_hrjw+o-w^q|5 zz_k7|jP?w#@I?p|#!0+!x(z03Ay zkbw6+Npq$s=-S))@uB6qru`vg><19(d~}w2*#-E1eV)u*=KHGPOQd1*J#(oz=KFp> ze;Q+RdjVvAK88J$+GU-8Ja_oA#&_5)RZUvavrKUm=VX<|%bdff`Zb1l8C~`??z`Ce zUX>k9kKn5iYsCd7+&+y$9>>a>ZoA;!GjFziU>!D1mQlw6UfWMoApKIczxL$6KDN~G=sB*mzNfvu zoYB@?qQW&a#!uGxO(?*)5<==XX>5w48EgDnZi= z9;!9>&eOEs_odj%Dj{c?QVUxaLV*HdFdCQS)C~78m7Isv!K}t~4J|iZPLK#MV)aS2 zX0rz|%G+5J3vnFa?eYow?w8cYt@4w`1>u z&`0ZP-4Eh#Br8Ng)W%a&IdBZI 
zw--BMwNaCqA(D2$NOnrSpSj0qy`ZlCralE_;5PzKhPeHo49kBQ>0;48LeAz-`p$nu zLq`(_7mFW3$JNR9zeGc28EYI7)SNYqYfjgUN7%H zq{_6ZkDhMUHJ4j1&lHwOVIYB^C6rWxXyPKu0!8KW!Q&xHUbU_E{Ag^2FyJu1CmuQuasrih6ceB>Fo93n$b2yrMho9qxp1A|xb-$H zE8cQsPbFllHA*=;$D4B%#Q7us$6PMD`IO5>I01JG1aRTSSasU^^`O3T3n|SyRw?x+ zd@nxl7@a347-69q;AxDIf)@k?bbP8o#(?b-i7Emosr{ybH6AFO*IyLHG&reqJTLXj zT6*kPbs+BcsQzCxG=JncfM|j6KShLlEQN$*B2RF5v)h548y$8`6EYeV_v)Xwf?GYF zjvDvPUr<2f3si$gc0ffrlBfb6!4%rTo-h^csPT7CiKL50misYR{|UP9bD(C2LLtK@ z4Uc*cZYhd75)Ls(Jj}DTJ$8>NW%ttw%6Fs)ee~1r42^o@(DpHf(r2+^=~IS(0)d z2tQ!Vb6;45Agi9Ko^*{ZY8z`)06@GtW!p3DrNH|`AkbSEYx~9N*pL{LynB1tYiuK~ zzJ)*{HP`p*Usq-gVCz%<)0s^p+RCcN6>u`#Y}?i7dw%##?MRqgR%~4l+MJ&6Lty({v}njw5+;Vwf-uJD2&fed`wjBn#aL^~j$_52exmr# zH~gPYj+2X_v4!J*#azlLP5daas9jI&HRSpKT47VrO#*@w*(EAcwhGCrh#v#i|B_W3 z$tB(zk>CC6MF@y*2LsdU4zSxH84rvLNMBU3IvsVJ=IDN&ewpq&)j$MF=$gB6Pm90= z@|EPaBzHa|m(7s#Nh!3N2&O~Xonx*7z<3N7G%}T>?{_^`%pXze?W;b3<<@z~lqKA3 zienh?p!?!0gx#i7E|5CDdh47bo{U_+Q*HTj-Q{%Gb#ygvRZpu6x8W#}AVq^-z~+wQ z`$_1x`c#uGlfuEDS5c&xvM`X3mI5I$6Pl6Fu|))o_Li=uVIHwS47^3#c!K9eSv~ScSA`56T}bUE7m(N;sH~uTZ|6F--hQK2|FH8*&BOF8ONN#|W0RH8Bl; z_x%}gttW-SCQ!?F^r!466)yh+t!^@Hr;K=)K^zJqo2eQz zCPdy~Gs-ddO@5+H&MaEVFQD+~y|&qnk=OH7dU)g?((=1&joGso=*V`n9$fRa^wP@v z@$}2V4s-BZmdWz8?2K!PYwg)-B>9h68$7RSRZh}1(@xfM#HtEk zOf1KZ-G6^2IM|4zh+k`);l8yb4iIxwD1aEf*w|Ye{2z8-wz`iq%1UzX9FEk0I^r<4K{s49);^Hv(`sU!iM4w(4eUKUYhN&; z27!@QGY!Wd5<^*8MGy(r%pt8`<3vP6@L`2A8%`kb5}C~#V4d-eTclF=O_}vxl8IAK z7gq+UjhT&PW=UOk%+26M?;~$n)LS0c&sU#Y?^hl7m9a8fR#>Jf>*%&yvzDe|SP-9o z1{l0dPc*rU#bj*UN%8)$=!lQ|S%~UsFXW_w{c;G58$1SUDpszq2`SBTzrt2yEUEFK?vhZj)wdMm!Z)PhlF}!Uw zMO9D=wG!W0NgXF`Go~^qh;~&=C_dNdV(+0eE1nlspvhU9vS^c51T9RVIg2WSkLjQ; zHVfvRl7l8%_q%SZYf8uo#YYDHULs`WZ|i)^KW!4e%Er z;b?Fto;Rn`khGA^NS6i%kxOJrmc|lN+K;W=4gO8rkXmxYRQJQfo-Fof8q&n?Ovzw@ zY%I%W+)ScN`t1{%U(_`CmyH+y4~^2o{F?eUj2Iu`$kRWDUkmWGFdD3paDOVShT0NH zn@nTWPpOtzNHVu8L|kYS7Gf5+KMI?g6h;lXlOs;wwdEm2>1!nRdcT(f@f^A19@+aa_{xtw8w?0;eNW&M$E=jzw- zgkhft(taSxj)eUU3iWh6&!_{$$FU^|Wp>6PvnU$SRwh#n-Y2bKsL!5BBMZmRW2qp| 
zf)PEh&C#S#G5-bx^R58r)W&>aUQEvwlp@i04+5Afwo-kWkBH>4l&O+?d>(H$LCC-_6115O+IfGhyrh zdqEw@#<85MDUp%D`mhzsBS zQU+dNa<3#K`y~K_EZGYL%^Q_LC%aogCn*T6z>d-25y(lSh07h85sRJ|D=AnezF51* zzYDZO3>J7s){k3uNcO}~8C+9RK}T#*P5H-TIFpQc^55V4;rR{iNq6 z{V&F7nvUA?l~|OtUQ|Nh4EmTtHw``kCW5`4utKEEoz`HcO5~!5CA@3z28wAwH^}D7?8H~FSD3OTg|o6y zbK^`g7r_vjB|y3eE^3~A3r0Yo(>v6_0~zj)Oy7Z#+kF$xNaJ6KODl|&c$$&Q#Df17 zxW8~z@H0r~uN+etk>lQYrNK`cJ(wUVefwo+tVqb2d=qRA(ARch;$zZO?+{_Sz7-Zp zOy*y0ft3_vhSZuxzt<*B>cj^n7+7(lRR^B4pbZ6Oa@Mq?(p4Hhv@PROYk+O>cU003 zC$Mip79&ypkYI|!Y+z8#m4uD6bF6;dE;u@mb+)3;h-H;B;W0&m5u@RQh#t$~ zQfrH<`w9Tf2o=zTk!8T^3gaB=rszR!pY>)|?uD3>^~0L=vF4-x;a! z(YnT>bE@i#{q4t7@Q)4TCT7<;Fg51y5bi!m>2-i9B%*d+q3ZSFY1P{dw`&Nh3f8Aa z!y<+kDAb{}eFsH}qBDvzzuduIdO3C08_7Nkd9 zZWu;0hvjAb+ua%{?#CH|=2+oKEr`aPISOm>w^XA>C1kN1_TDJ^yhzQhoi6_bTBvW{xZk=kbVob?7O5PXm=?1D_iT$)i{y}V<~-f4C#Q`udRf6 zl#vNQ_@G%I+F=ee_!Xly(g_-BMj{n6`?4}S%SDtGW8b~Px`!v93QZF3^=k@xQ|Q?| zJCd#LqrDl-sg{-qU%_FfFB$UmuYBMNY~~F_ik=+YL2$~{9nL0+97-8p7XLM8pm?gz zpfEJjzsx++2a${@!fmJrH}rXc20x$0T&~nUf4e8E!0Shyiw+qQ!KctsmN7 zJY57e>s`fR3u z*QrK1D4Z3!yXm|YiRn!xs0O<|e5W&U>&mn6!o9gBD@BG0bHvHx!46qisR0m(l{_+dxagYWc=Jl}O(rYtX&EC`n3q$q?wv$bjV zk-v;b_!@mnr*Y#)m0Q9C-h`g2VDrB|slRsoz?)?ys@Jf;4DV}!V{2iEytywM0@trY zy)B2CvZ1JY)ses7*fC(c&5I${o4)fpfx^9_oC1X#=k8qcG_dD_8^(6PHK<*UB%Su9 zrYodSNbf{fXjvo+yQ6P$2YQ~z&jLc>>J_d>wi=}k1V`BJX8&zF-DC5C%wS$K2f{9} z^3a#Tr7z=0f**|X`?o+=Vg~=ta%HmwoHqTX7C1figG=EZnu;aOSsT@!jMkq){usxt%f#S7xAVur*KNsITNa2ve^)B?I((itW z8iWlt+<5e$1~xw3RZ4oWHgH4q^_TKJXyCZE4YyJPDQli+qj!N#yjmsuP0d%3(5Npu z3cU?=y}n5?`JN3`E&i2M;dN8^hyjfoo{-{$u}1LXtIcib#@R9XZ=PBu-kh+lHotc| zmxkr_w!pn@dn8I;ass>Za9tI1b`{#=Qs^m0S-o@?mrnt=){|DDw#u z&KMV4wqCq88%DT(-cNv)66s!c{b(+Zw}ZJW+BG-j+p6ussIv=pzD&QY7{BX|<0t07 ze6IP9&VD7H_&$!l7Y`@^31Q!x>H86Yn~vXSoE(I2ckgN0>qC;7@7++j-t|+k-`Xy` zT6RoV#Y)%dr}JxHoYX{hw~wPVz7B(}aRWxq=Sj{DPU2fGU(XwH$M|a)wM!UV*4B5j zlL~CZSMO?Z!XG=_p6@Y|6#^WuSM%3QKu-;?%ie59h=<4KP5og z{cU=Y@5Adz*cET$*a807=If$b7_a-?xn5#qmYR<( zyA5<^0M`}AlH=anM2+?q@nx&s5dcGP?cvPeoACK?rB$U$ts3xrRs9A_Pv6x@!&_DO 
z;#~RKn+@ytNDwp9OykJ2uKQ&tqeeIV5@qL$&*kGglXl%eu+?Cbv_Fx&BCE$d*{Kqf7+9ISP+hv5${xmR$_;q~u?;7BA zq4u7`&T6d8>(q2v<%2QfyXEw(wt?Xin^5QF=J$S>9kwo;g}_st{QgbE{IUMn0-lx0 zbLH+!%CY@DS`@Y9<##W#HJ=2Zy7m2V_-TeO@wxY|SBdBS;%L3!zntrP*(S!s1(7=Q zQRY!linsd1G9yof4+kT(Er@!=A{cPdE z5hEBpJPlLDb^EYodp0-gTZT4H>X#4CqOdF@jw!7#%Iz#=Dg{m<@;7}@Aw#vjH==}(5nf4VO&_GXR-#wP!N=hR0X(H`{>4bX8-*TPXh=C%g= zDwYWECA1^Gk%j%YCKX&-Exe-;kDky?$F2+9;ITZWbrZF=1 zg@XNG5&P$UY4mvE5_%Oa9=VC63X)Vwwra`s#;2gHiD!DM7vO#SXLa!bOdC80;4k6! zvEl~XtIwGlkwfeF1{fwaW!2u%coatA&m<*>f-F^+(1dJ?K#u(r$12!MD*jni#mk*o zimKdMrkf-$)`Ol3Jm*=GLj3tjKu8zr!a9}SE_<+QS135cBI7cDe#eubEvgr0E5XdX zTxZBE=^U7aE!UzqMj4y3bY#EA7B^dzZto$@mMfg+luFSs5q5#IIIZ+Klx(cGAtR49 zXNGo4GH)X+@Fp_K`%uU4v$c?2aZGpK9!)i7wrP#ukD3xy;mKXDzrgz;8JC^>?x=2e zW=T~@wDj=ftT-`(7Lr18!eDVg@l*XP8{QocD&NLf;u4rRQMUG1OQJ|x*`p|$KTsS_ zX5wHCYnZ11u7WgFaJuZQy_ieM($AoFQBp?~N0{G9y2x8m?igX$!cWjLD{F(sYwzvyKUzE`5^0ew?>0wUS>F^XP<3(Xkt~`}^W%R0Z&7R7w*}41e4+Hl2!;eM7@e|V4J(|V#whZFZ*$b2Gv`1 z{_dwiI$hBFiy(OfB7EqV`uBqa5uqS)`$LU)(hqNd*0mz>(`{0a&_C{cCPLC*-)0!} z2xa?+0lD9dZ-K?)o_zJAJ6xshPrBTTljjspr%;MAi6{XJ%6$+Fcf^Vc@^X6I)j$D;(B59ZU3q7!mgn z`9I)I5_cTP(QFQDlv_$Y3wyxB z-K!71xj@&f{kZ zVjGs6xdW-8%99 zsd8zcZV=`T^adtOC4#3t;0PAC1XCuqhqbv-sa*iyYO94y+HgU_N8vuG1pbTBry5*W z7j-D<%wvjUQ<*ESZw74vj|5|#6k`K!z~-*58DIjO6~(Aq$dE@Y7;Y7|-i)!*oSh5f zkcbJviHD4ZiVS}88@f&Hy^VDnuLZm4bv(tMT2t;b>2Rj(kG%5c%kr*w`9DYn&G%(ltIhJC z75?ao91&*7hoAgX{J>*q%#$LZg z>vg3bL6uVl5Skm#wuoBM~fHLR=Vr zMsB56bI8ZGU!jpkU2!}4vmdLubg#C4Uu$R2h_*C(f_-=YaAOO@h|is&@zPi;Z~{G0o*@8g0LwjEGZmdj9d!=Cx)-nvsKNL=~LMAu~1sZ z8%q!O&B&X^&FGlTt-AGLb|%C2eMe6#3x`ve#!M>))Q$IPb^Wsimp8@Ze!qX#t3=jk z{&Bwh)J=eewM%)s?e_ z7We%Lynm{N?{m)ow0bho^Wy7$&HpOoeGF+Q^HG1wwws;pWf=-GV`FsGnB1>ixsq6@ zHyqGP|0v}U=d6(PkT2~rYF%C6^LrjEGyNE7$&T`D#xf7wS#_*y_h0jYPesC+y|SI@ z4;&!X87owF8jwKLk$L$WBIc;!i1?=uDfJ;>YV+^Y4uy}8^KC8PMM!84f0WFqA@dEP z;CkK>qBG$*B9C;){^;F-SPv5`Al0Ejz5or*odeetm#Q-#C4!b`vUPsVxHw)>N!o%a zQxS4rgQW?pPnT+_m4xl7@>=%cNgD&ll!~+P$+hf6X7MfOyDQY<|4SiuJO 
z#|dH~wAS0BqwqmW$W;0P05%Z)r`7p&9)Z{i^Ay>|NrF#%`kLA`L@7(MOwkk~Z3V_t z5zq9)CbJ2-$wqw&p=jgIvwrFa2774wby$zOy%v_|{^=1k`SlI--{nm-c&1|IpF#E3 zpFwq`|LX+&55q4;-Oqm0H91vPvqrVv(4*-KnL^h1dRXFuJ*CUldPKHhYsxcoP&S+0 zcD-;)b31t@a%DcJFh(hD5=<}+6@t7`J`^GQx6E#wc3kM+f16Xde~%E>^ZD}*VU%`2 zz)d+B;@fn?2v**2bDqvk+q8kqavrvi-@C7WuCD8MDlUiTUEfH{_QY}0lEj=YX@v{O z-;qqC&(3XRMLP7y_9PLFia|5M>P7T{I)LLxrZDx?ue^nRJ#f# zmMrEcM^P%(Fm}gYGWGVEzj`ws{V{fqndXl(@!GXaS;ZyDRzt6@-*v6lu@vLMM$%-{ zdb-(>x>%)FZPWj(gvN{P*b^ajMJiNf7O&eFH1mGa zWG%1*PujbV$;Xa%T*G5Bu4umy?U?ZazgUw%(YxAjfR zReFiHE4*}I_~GUd?u*I+bv1D2Z+-@>0Zz10jO79OVsYk_BHU{>(oH_kPCcfOo=WE(8l3^3}Lg`$BeHmhV1;%RITCb$~Su<-?nr%M9b zM1B_%8;z|Vl=#kVzDxcHLsLSp3BR+9Ht?yy=3+ODY19bkMX%%pv6y7N5dJER()TDq zM76tA>p0wn0*O+Q4b<#HRHk8j7+rr`RnyPG?p|MBf8Os~USp5UR)F8DrAd8&JArn= zcYWc%zeT(*Z}Ux&V4d_V^zLLsKwL4~kFfme0$zxH+U6wj0W}U>Jf{nkA0Lq`&8vJ; zRK|`>D{x1COAuE1rURk{{@sWg?yT6TD5pRaiH&FIiohrtnSX#vGUOS0@E6^P_^)(@ z8)GASIBv?xsi(v|W=aung-O{@zG{rMc=F=?B&Jihh*!_8eMrtijxlkfbZS$@*ZYNU zXg6kfC;yyWr2^%pm$bOzG#J^V51f6*urLd6AZFT}fA{W^Ss(N%Y?0tP=(qs(@W)_D2 z8Y6v4agL~pApu)6Z-M0k1|;w%QTU6UQ3%nffFlvwit&>$DjCW;ni}K%Qv4tfIrD(D zP}}-)c7`|BhWkscIVC{K$U%!1bNBswncnP56Q(uL1ry}oaH<8i8wV7S0Rf1YG-0Qh zrQXm|Y97PjF-uL?3}Cyx4gU5>I+J4tFwJOiz1aZkbfPQ*Oa^3N1kmX*{RXN0HuJ6m zb0%0|D#i6A#uy3y{ziW6r*RoRL>QU8*5raV(;lGexJcSb**rHkV1GFO%3@=xgv26e zNmWyzXkbjGqT^#lMylLM30tb-tck23(pI>nG0-WXqJUo~ry4g85ENU%$(nhIt*Yv( z;9)DvN{Zrpg$i}>%Qh9idFo`2xOTDj-<$sG1!`x0YeOnQ_TE1}qFU1)l{7BQxRDG< zXp_*)Ay5TTP1%&Hsl-HBeISX#C&EgJcn->vdnxRT8CUUz3K1nIsnmgi}XVfEiFo z0iln5KNIzn$`VRUj)W-&dCLuIX+bb6f=-_d83FF_`m7(!{IceBbHrDMqC*a=pYAqY zF+vtGBdDbnV2di6Xd@^_&S%VaiwPqGXD2hqEj7BJ|+mVW&3z`v-Z z`3nA=Sm@Y3T7sm%g~2q!iAlp87>P1a3j~v9qve&{vGR`!XK5Ots-T*JVWFDHLsSi_ zWe^+9xhrmNMsb#Omqq0KbfP?497~m0+{iYNQn~jhc zQ>pm#+Zl$DC{~_ETYxeLb+4a1A2we`mW8dkT;n<)nXOG+D<~aMN?V>H(J6VE|6e^IU!H4q+Q?}th^ZKy z(9Nmnsh745`q&%DL4^#QQ{)i?Mr)zDpUAsc5A$$JfuN{8je!Op$-nh86AZC3bdhZE zrGV;=1~FWzg$mW{lO1&xA;l63RERCCiL8ne0hq#y_~l*m0&jH6;@(P 
z>BNM55(APcT}XP$o@b}?#tV4%)lE-xuFF|h<_mBM29Org)bgi5cz&EQckIyE-q~%- zfArG{u~}WGEyn0tOmJrWHt5q6V5hqFy7gJX&+yv_FeTBEO7`Z(mRiH8&CzA(v}FCu zgP_c*c*rG&W9r24*Un7YX#vmDj^w~YT;LRBy9kj=SEcOftI&mozfhAT#b+TX>NunY zo!F_ihXB-9``0MX3!6HLJ!q4DombL=?vDY)Dhz&YX&!4G3O=Aw zjT~j2R4AWLseKM}Lg#-)e1dMr(Et?>eU(A7CjkLNJ=WWjPHOARR{63Oq=q=5%m*)v z5E;dQ#`|T;eGKIbccn!)km-xHN=Jwb6MsF(IsQ%eP{+a#&(FL|RuJZG1dmVV4l7Tc z-ho~kb3)jY^Rw!PWg&O>`7^^2GXt7|zysO?JfXVbOH)tmAkRpi6!fZYDFj~d_y<^C zC~s{jW}$W+wipZkAT&nZh=a2DZmSrX>BW4Z?jnM0$vhzER;PQFiu$#O<`QwgIU&vw zl;6T150g4!J4yPaA6#H!@d>*}zPJVNcm*J!@Pac-SYMY#Kn|P6 zSGMGn3B;$6I*Eh4~8oR91NGjBG<{XkFM<8#c$_cc&L$KAf-HH10V?Q#7GNs{|A zf1InB)8_k?y_lYxm7e~-U0qam9yh;5O<=>$(sfl7$NagUy8S&NB=_vyaK~cwG&S+$ zm;8Lh|0;h!LH~7r%-U_B-M#v~Rd-`_nI6aHr?A2Gu}QGs^LFt?e{Jcz1K7sx_I{(- zZB9Ar0eGHO<#xR+Mm>$>aNL%S4FEVl2h8xlUS~w{J)X2`_@252j*1M0Uf7ii{O;c< z{Juj7a(}4y^N5_jpL~7Pxs6|3U+YPL?%Qkv-i6OxfcTN#=M6xo^Scc(Cw)iH>unZc ze?$A+ispWKgwNiFAK+^788fP#x%5RswbIS?l3bnblk0IxlKZ{k*IkpV`#NXjh0piI zCXbxtif{GFsdA}89#Qk^XIatAYiU)`j9_V1-8}tYcTRel*Xy`5zqnBNvHJI4*QKoY zUe{%|&g>Zb*~ocj0-jTx$b`{B!6P{iHM{jCl#f=Y2DZxVc)w!&_`W;7!su~%Jeay> zG<(asd86KlI->cE+n^TFDDRYVwKQ~>vmq7or>ot;7c>ggjkX(+@~6nCj5S{si}X=T z?T4r^VT(;r(XlY8Q7L&@I_wf7^MRze z(k%!sIQ=~^@LA4Vlk?!iX~Is&-XshT*@9?Cz#lyXctSrX+{<5i^rt%$;Hg}BM^N>X z&@5@Ah-c>(1YjDe2$2*M*cH&A<0Vzf#KVc#?{3Ausb=pG8_SW< zM8coq`9B0iJdEUUoh;*$OwEp_yn3FGp0}mBTim|{)sZ|g5>E#u`>WKzfqf{U`pXTq zJQk*P>Z%e4d38rvY*v&2#Ic7*REM25>uOFL#T@}_j#u>-E8|TrBU(((UR8|0Kz@bh| zyMq6%AX~Wod(P`$nNdA|mbR!?-O%amATb8VZaOxufmUI;q@8P`Ho4xOD_8WB-uLe9 z(O|xQu5(%Il|~Vo>~sd-q5sH`a{ekaDq5W$*=eGhj-qGqPhrEcHr-8o#6}Q6gV0@-LHu6vk-ayuB zD!v&bnJOr}JW;R1f_WBt5i?KnJ31R3el~ve4{=fAI<>+f(i=hYyN#4K zc=C3<@HP_tJmESc7@tS=A!)=Ln93$^(4du+xcB@*2^{jRDL+!L7WH=O9(`ULhUP6f z^4y4?e3CL`Tx*;uv&#Gpp_m5(k}gPaNBGM+F9=kaEAqC0)3q~UXG2u2bV4H?|0b94 zja7|9RtxYk`VJgyfi#eL9FDx1vqZVt0Icv|F99pCw{9P7Qp#2Y0%%sK(FWf;)_rL~ zuHcs@EnniKWnAe2{er+#1)0A=zy%5_C0$gYYzN!y+i^$seqybyddY;_yakbWzG+06 zo|+B5J2Ln{g+~6&m;;dVAg2lJ__N1(Siq7=oDt+Mv|vy1T-eZp*7@wbP;>~y@a3i} 
zQkr*p&QSTzs2K<{A?whOq87tIFo(RW%omdrtLTTI3f%twv{1W*@ni&nNW%t7Vf!yg zgam>R=yc8bN#aq3Bsr`!Q}PgHgquuEVOOp>|-pBc#C*s=tD-}m?DG)|5lFkEXK}3By}my*m)!W zy2HhlL!+HKaKmhiYRH1^WC|0mEC0f>+ILr0JKDiH4Uv=N0M6JkZ#<Tx2kK7_UQRNEP|0Kwqm?BTzm2@dWU&#+FCvxkc z+6D1aA}_j&8C&3=PlmJ;=qJv!Y2Xe)$H+Zk%gAlB|C6uS5+{IJ5Zh?Vg`KdPe`Svu zV;HnMBX5E&Foq9w>D8z93o0#Um)(p5f@VaSgddJj({f9qA)B2s{*|A6{Fbsim1r7R z5=LIOmRc-5OpBzImf7wYd|0HGL%LyLJL8l_)C76d8m3pk@S4tYTjs8BIb=A|>DO$k zf7~l3@6>gol6>tCq*K3FhZrp1REW8M_jpXC-ni*k7@^=*PPW`=!|_Qa{cbn`+ClrHW>VzW_<&UP zloW{4n1%RK5e3NTqOdP0dFJKB)`VslbMNr1Nz4>HdWAt89DM zm;fZj6crZ77{T!!?f9mrrDh1)A=`2r)h(X_Z}eJKU@MN$ncL_Z|JO(u>-uOC2E=+k zEYEPAiELq9gCs&}^Lb(GlJ4iP`oSY#izjHyEvfPBR;VYcEdUi3h>+eIqo@S|KyOXBVLLK7`lZSj^a) zoKtf=qpktgMIwUB__*7&>rhw36^?DjoIq3Om+alDUJ2dm- zG;W^|2Ws4X#!06w2VQh{?T$?kbR3pqg2VfJ?eFMXsizq|mwYLQSLkinujd}AlnJ^9 zpM;2N@AEGTS+^d;cs*X9i^uEE&)$iDb=eY!a}9hOW$-oH?iVZmh4l zAlZ`{siKjTW4*Y}uMOdwKlHw2i=XnHU;A1UL4Xc>nWml_gXa#Z@9w9#65bcJQ@(C7 z)}JX>ZTecC&WUs55h|Vya#&pvuJ#;VRYPCYC3-8u40LK3#zIRd z^Q2^}ILeTBGkes(H*naxU&P|RSgy@-5xKkC(#Mx2YhMhM^bFs=-dMeq*`;@=VOn3Y zCpcm(ml|uB!1cOz`v**<;w!z|hDi-Qq;68JLL~p0qlI*}oInIN87jypKAWXtI6j-G z&1(tHG$JG5EbNL;r!np&r0bBq1}tjQdD-56q|A2dx;Cw?)fybdOqoij(rg6eHG|nX zJGwe%jFGlzH?ewqlr8}>@li5$8z$hGL9KbXT7r#~a30H9noO2hCzN7!(k#EO2ewOJ ziL-*$9*g7s+2~20dj9)Hl#?7H9^%L86!#Mhi1o7(wRf~|rT;%R6DN8@gCBN>t%WI_ zQI+zXZ6*V%&oOm%sWRE9W&+ggtZh0P@{6dg$gdl14DvmLbO*$zcjb|g0~Ha_ z@Am;C`(r(>*rB#Gig+z0;pH_hm7bfBGId9qj`l!1Q7xU+<5&Xg6IUNOA->62iaWroN(J#8r;4K~3y+UY@$TJrQX?LJKiCWd>HF67wsNzP_+|w{;%>;QSLKv z-7bR`oScIh%FM%FWoU;obt+AYQu zssuF^OH@g^S3FkTKgNb=IF`y4`TSoz4&3dFt_Jg1YGP$)T$Ragf)Z}Bi}W+is4uUuP&W1o{iDlYEt_(Y!UtU*0l#f_zT8= zFP?W*mM~^uAfVNs0{Z`u(EqREF>$o6N|ccsV1Ns~{zOwPl1N{SbHKx<2pu$mWR-POg767vFh2l#@TWFPu%SPs_d-8vgn?NPlE_Z35axex0Hm^Qlg~5 zO?M-W($d{h(v7qTlF}(%5-O4+q4aOj2XEb1`CjkMwfr}qot-o1?CzY|nWYXwn9TcH zP4Ys;V$lXZ(r~az@E$4rI)cL9ONMo`Pk7M3li-$*5Q}&VWi;eKxSmeVw@&w_%?^nA zII5h*uN{;~P?zR29HZYQNL8nOZ?r+YDQcQ-I>yqJj>Y3VkxBZev2jlrGn4@%2LK1z 
zCC0v(yoD^XOx<#F-O|isN`WQ)efQ}5q(_w$yNi9^(oz`nS%CS{|thD`xp^>|AD^UdxHWfIdUT2aXKTzIX{^BoH z5hik$E_wEGChKkPR)ef<`*M7yT%%V4`kyvDhG|aJ0`%<%4szfCIV@D?zZ+43iLO&_ z=-h?~6)}IpprQ-pU5sz?MFvaU_y=b{T$J}k{?>-7l$GbC%I~A)l28! z2r^7O$d)7&jZEoXZj0(Y7K{ifD0oUKCPB{6PD@MA&i^VwmbWUCtSL`|vc8t8JWpa? z>TX6YRg>sE8(u+AoSpZy9Yf*(qSLE8?redm`AkxyO5SC}!NbAa*Bv%$alK)%G}^Nf z)pVJ+`4e3x3CYr)3ZnC*Q(Z417tsvA3c9j@@Ov)_fUD{SR*6fm%-+$4<$S47R+4RH z!}J^|t&P8pn!iTkj-}dmg;5UoU9OT$Tn-D9yi5{C_HvV(h*zvL-#qRv zcHxpT4aCeW_q_lpGuilO$Ua-@+t$>1vxmQja@N7P1S zb3_T34DUtdpv^Uz-(X)EUb;c6U)BW&FQGOgwRA&^R@x3hm$|Qnt!?8Bf{D~UhEiH& znsXQ*;%0-j1aLf(5F69sPMDLrDM^3&jaq}d-s+mBritmM5I_I7k6I#XLqN~+ z3RA*hlbS+avs2pM1C6};%$F!i$rL@0{xI zy^rL7D+TAno$fUD8Lw4vLI4p_A-3H_N+0N7$|80h_A0tQQlDB~XnpOcu)Wx_rflw2 z_x4JK<6J_1oxZH-t{13vTz7=|T%9W_?{?rEsjY>gc2~Q{R7I;NJxI;>yStxj)78IU z`)q^oDC~oDH{2RVIGW{(lyBJ(a^>1fqFs2{H5!J-MQL678j|vQR8y^S9R`94cn!+j0E`DoYJ3$>=*MSMv-9mk4q6H&KQ%k52kY81 zy|Ow>1ZDp%m%9v*%E-_X215k@3ac1VQCH3mM$%Ggr2e4EICl=iagn@D*|Y6%hH2$F zGtMj|1$S4X@)FSu1_8@MMA2lCQjgrCH#{@MS*^mmXbRZzoOB)E1uRaKdlJg;By{Qt zgmn&Vpd@`vgiI!t^ZvQ8$x=i_sR6eNU7&W#=^q4H`1Y!5$D+_}mk8Icgk;5;ZE{f$h;yJK=Fyzl9ePfj?A-#cADe)fbiZ|1l* z?AXfmI(Fm?DoAa2-m7|>eWB|Cawl+DoY;KmPs=HawS&3K6bSHbKJa+^IZ2`VWsQq$ zIB2ktcwu+l7RfT8hqIuu^_6~F4v|Xr&X3WrQ21z~n%qtyMxQTl8ibiDqsRPVkiK;4 zYNIY12d8iRxIeN)G=EusiJkORUdM-T)Od?^eQ|?@nGQD+tKpSB{~hW@6=7%qNLd0MwE zNzjnw`@XU1zRjLi{kww@FHG4lkG4u%7H(=i5_gHIkt-j(n>5ECN}g)TG?kJhH&9)m zT-|YI?X!u}WyVK=o=>-f!;Ia2ueBzuUdk43Gw()~WyxEKmt?H&g7Gqn7Mx$ER69=B z4vaZO@g5$^5Co?)>g);BxYjcTi^*5kOe%~`&sv=D&AsJ{>xr?PkPF};k7x<*MN#UgIU9j(nsu5>KD( z7%K|`w6y0fYz>ock3Nmq4p{59I^A_6X^ffTBg@;Iz;+tX$#~@`_xwR=?Y(ri#XUD# z_@+I)=fw;gUAf^xXZBkgphh6;Od_R$h%hiCz$YpHTmXK|!t;OUC13j<0H1pCV0sNa zvc<$y)%Z|?_)Lhdvy5eJO4xxoYQ2jS;ZCN0KL>q;L-ZTzv;En~m_0py;wiX7lH=R6 zC!5aB%d^|xfkLgly2UcjVaK_n$7xT^X$-ouQ%8>}OF19AV4rF_85-&6JWHw!4-0=X zZ=^V>Qi?j3(uiR1Lpn1&JvlCdA})oEu9+P%sHwxN)hd}G3CU0<^sUr-*@9|dL%EJ^ zwjFs39jCGhF+7VKhr=_wR37eSS~v`8)l(I=IMd`EBkd09)wrSeYnrklZ&>)*RBhL$ 
z?X8$#U)Tf%&zwHNCB@7fDtO}hg)QGSr)Z}_;1Thu#Ghb*O(I3dY!^qY3Z&kBPF0GAA_>fJ<{?;Yq#j;=D75+^+ z8u|z(#&LC@FDJa$(ixCH9|PatsF=hwc$$d66E<@k?)pI2M&s+-&6Z|vv=lhFpd!9| z+ukRE+QsNPCgRSz^$ooU0h(aUqkZ*?sSZ}`nhwJoa4SUCF9&ml!%=uNLT>1_&EF>w zu2qYX3vp_9nDR>-O?vs2MUkt4%D>lEu9@z;F@z~5*^%>(A*NDFrugZI>@GY1XC*E* z?ju|`nKVLDyjfiJ=GFT6eB66MZ&F1GJe^N=iP8+%%e<`ytxh#4VL!6pdLZKg&*}R$ zFg8cj_glftyS$yFJNiM2*qaLt^d|7@iO$oIVO{2szT)o0`)d=FG+Co~UX@q@0}b4> z9GliH`^^;`6l1UXCpG7F%CY%#YfBTA?6wr;+xCwKIHz(SH!v_=Y3NN4b(%L0=M{dq zfrBw2oOX=)NOsykT zy&thmsdgL~lVP^$2pKne>o(L%O)dxDYM@K%m6mE?PJw6eWgt+iamgWwxX*D)MOWTu z$e~z-H@N`2#uhn3EN0|8WQPuOy5Kt8&=(FAAsYJUHF+yHoY?gtcxl@Ky-~UYMjQ)? z2cBA-ve_FoJLrYoow3(KH)#5!wls_ruRB zfx2oOjcOH+I>ngTthj7C=H{N)zw%~a6}C)CkQinU_H$aza6LaLohh!;oG|>1O92-; zY=tH+#y1nk-}Ip!i4~ta+(TGE#iv4}oUkgV`xN#?SEgVPhf+xYyBwC;UFkv5&Oh^$AMx7|P1 z&5s>u`f1u&SSV(sJkc3pry9mI_7kHVx0^igvrGH$p7o|G$5e7dJ~qMkv1dZI1F-_doRQP16K z=<)vck&!&HG3ymUwNL}|uw;@Is{tZBj1{izc3A67&_D~q?vlNj#anVBU+h|Sib^)& zT&rw!yNJyF;5DwothXBs&%Y{+zqB>S=|0`CnvaNq7eCV|=cKkQ6JvIdFt%u{qffBl zoQ)>Nu%1?y=GDid@=&qH#Bmj57tY`pT(4p5AOB&h4>DRPpZ$YO6}DZg$rKdS;x8xpJS# zOyM7GZyFv*qV^-|k6P}6x86&w<@)J?Cn+|dwiDf@Pg56_yMI1W#VT7``;cLtZkCd{ zPZ4HLZY35g)e$PABAIZ${l4|;mWO_w@1w!^Ng`#NRQK^l&ss$phFRAUwJ z%mm>>53k7Nv~BNqjGtNi!=F^Zu)r=}e*gU!_}ce9+LG__^EHG#o*to& zplrr|+NoNi`>3dRi^0e9HMcU`VS|*^kt&mHtRxoub6B|7!=-+Crls#4ZUqT4R`R7; z*;ocVOR3oStl)}jwdc1Z1KppXk+YNwnr@Q(TgNHizA+}w^>;GZf%Q|RNCkuI#B=3rAJYmLmwypbzt~R*9cTw;$edq@o ziblK6Y2@SK+7&NI(o~oClMk~Rl(@#fG8E(CJlya!>Woxcvt~+X6RDNo~ArW z3>KG@B2!(r-+BU*LFeCS?o7~#auWBLh&1X;QX*}V3}07t9VJy-xtmN#u+G-Nz8Xrx zn#JjT_9BM{Vbp7CcRbhKH1E@=3Y&hiZzu>1Ey`SlAR?y1tvxQn+Z76%(9K6s5X-b# zY=^~UK^%PCjpjT-fOfaI`M$@xo846*g+>`ZI2G!N zrj(y=fjgdTzgX%`V1s7byxRZu-2K@~nGKAt5w#o2El~^I*iRH^?;5V_D;&EOyU5f< zc)v=xQ;W|g`EKZ;)bqoriiZS^G8Lw&34BGoipHCYO$FwuPAGHb&msN{;tCD#uMDL# zGJQ(7n@#^P_ErrGn~IN@g_ip?!c?Q=iWL8qVB@tfj3sFP+J-QFcr^B!ZXfF{{2LZ= zupTyeTIHv{UU3u%wR&k~WtkvgQiK+z{++RXnO#q-#GH=yft;$M3$>*-jkH?jOjbsw 
zG1j6U5|d8Qt!#HgH+BI;tupv<3G?CP5agVkWzr5GIvge{nufEbJ_dec<2#L$O!=F8 zp?5h$aMOFe38~5B9)ExF!MsYNPAtnfWfi-+z4ncwt@aR~hZc`eWBT@29NOg>OP1uB zKaWv7m6dMp1OU&vqP%oX#E)hV?cz=pya_x22v*YL=VTQOdY@JqztA{()_7g!&6a zg-NCqi>cm%e71!COR?47#go|Nw~g;HjId8K;3)8@+ma$pCX~dm5jHMs2Ro6o7PZM%7sE|!CuWwJQtFX(&=m1w-RH~txRD;H#qr>?LJ9CAJ^V+lOqc!99xqC!-t0Ar%x0M=$`BO`xRLKpyxM;0$Jg{8) zU-lgHe=WxfDh$fYKh9$J&3Bi{+w!aR^U8kAhp&1P7v(GoUpoC_^(((-1aix~sS`#g zRcaqwg)g355Dl)Mg)5rDwuNQLn?-%Ou)ie{W z>8UL<>bTeV2OF6@)zY>9>imq^6Xx2)Q8C$*L#GqISh!CcRy=7}HqGi$htxjRm&nD! zpWNP3u2R@5_eVReXWGW6_J)5#w6~GG#CKh+ekCuCoHIa8%wu^hl7m>Twi`=9~BhC31t&`v=sp#(!_%{vbFjn~8xYdqVHNwRUeA*9FUf(RCNe(Wv+S4;L_U;&@QNaEY4gu zaC52yo(nF2x4Q^AIY#uMwyH%y#7 zz@uRzLTr+;(YD8!3L)z#F&*_8a5+xn%ac!iPGhWhcYLpWyx$oKLx+~f1O3tmLh7qMy4EGhn z=Id|F5$yO2e3QBt1_doV1%>xYw_XYHdt^(9IFwYP`0YeYznL?NWJlhO&54zPt2lU& zf>OM7z3LMhw|KF1Y1y5{yMvM1i|l1w@b35C;OVM&5{P1RaFxWFQw>=VA--xyayqr} z=?AI<>_j;`08XCYgw6(8V7;po zPjFQ*ug6)JR7o4-ef3^rWMmkEViD(VUq=Wf&g*zD6xog@)c%;x+tgB;QpQUk48FWd z@$AR5ul+1EQQh%Uy1g0k2_)dZG}#&uE82*{7h`U1S#9 z3pK#tUO?pFp8-~TJE!x2VU@BH5T?iU+$!x1#w6FSIjH^-rfzD8N*Yyt2+|j+&o%L?M8*IWeOGkQdCgSt8|QX)9HP|ARn?c`U2(*4}`QPay|>FCjkF!lOkRIkanca56@W`ej90LsoMimDJ2Kb5-_+1D}060rWdj~y;$JW`mB0D`mJb9h4|x4=N|(C+5;oPdaB=;I z(XO--8F2wdCjgB8=h-1X7XU*8Ysg7e{!DzrzB? 
zECDdwdA1DH@J8{!QCtihEG~B-9~2Wy8NiYG0N8nLU?`|8{TJxx#>hXc@o_KSH3zUG z0k8y~e*g|8Fy;OtI9i$heI(`S+`oDNGdc!v1i*=YlZJV9A#G%8VR*@W#Q$E}K?2N2 z3BZqmuNpGiL@AAQTc)&(M0)SKj(0Q>cs41sLEL3T-p1O_;c_j&UCN}^0PfA%Z~&oOWZYA8ed%P+>C{iwkRaEoCmqUkFX z@%M(qU>3NiBb1eNaKQp1beA|8xE~*sKz95a;a7J)um<2R4p4apRM`I(T5wMXFaumk z9?DonzhL}cR~{@6t^*F0x4^uRzj##n`|Sc;O$|ygA-y2{URw>U0k~KMRKAPhm%M|i z#orfh@P|%NX<5Dt>0h5cflm!iPK64;6N3fsz(Z42!5DCwA{2uq`ELyHKn@0hQ}Cc5 z&pZDHT?`MH22P-Z(lGA+KQw4++JEVEU>Z2J4N7adjP@_(!0BjEoVL`zmAgnz11kqk z0fW+br7xlVmk$7Ttb299GvX;Y6b zqy5XF!673kuSxeZ-hTlkuzKKN2o%Sq{}0@Mp%E|%{2C7>wLvZ+L9Y`Lcz%X~;RpVL Nf!6_fgY!4&{{uDhM9BaE diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/scripts/new_mochiweb.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/scripts/new_mochiweb.erl deleted file mode 100755 index f49ed39..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/scripts/new_mochiweb.erl +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env escript -%% -*- mode: erlang -*- --export([main/1]). - -%% External API - -main(_) -> - usage(). - -%% Internal API - -usage() -> - io:format( - "new_mochiweb.erl has been replaced by a rebar template!\n" - "\n" - "To create a new mochiweb using project:\n" - " make app PROJECT=project_name\n" - "\n" - "To create a new mochiweb using project in a specific directory:\n" - " make app PROJECT=project_name PREFIX=$HOME/projects/\n" - "\n" - ), - halt(1). diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_std.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_std.erl deleted file mode 100644 index ea68c4a..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochifmt_std.erl +++ /dev/null @@ -1,33 +0,0 @@ -%% @author Bob Ippolito -%% @copyright 2008 Mochi Media, Inc. - -%% @doc Template module for a mochifmt formatter. - --module(mochifmt_std). --author('bob@mochimedia.com'). 
--export([new/0, format/3, get_value/3, format_field/3, get_field/3, convert_field/3]). - -new() -> - {?MODULE}. - -format(Format, Args, {?MODULE}=THIS) -> - mochifmt:format(Format, Args, THIS). - -get_field(Key, Args, {?MODULE}=THIS) -> - mochifmt:get_field(Key, Args, THIS). - -convert_field(Key, Args, {?MODULE}) -> - mochifmt:convert_field(Key, Args). - -get_value(Key, Args, {?MODULE}) -> - mochifmt:get_value(Key, Args). - -format_field(Arg, Format, {?MODULE}=THIS) -> - mochifmt:format_field(Arg, Format, THIS). - -%% -%% Tests -%% --ifdef(TEST). --include_lib("eunit/include/eunit.hrl"). --endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.app.src b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.app.src deleted file mode 100644 index 4a6808e..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.app.src +++ /dev/null @@ -1,8 +0,0 @@ -%% This is generated from src/mochiweb.app.src -{application, mochiweb, - [{description, "MochiMedia Web Server"}, - {vsn, "2.7.0"}, - {modules, []}, - {registered, []}, - {env, []}, - {applications, [kernel, stdlib, inets, xmerl]}]}. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.erl deleted file mode 100644 index f597c73..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb.erl +++ /dev/null @@ -1,76 +0,0 @@ -%% @author Bob Ippolito -%% @copyright 2007 Mochi Media, Inc. - -%% @doc Start and stop the MochiWeb server. - --module(mochiweb). --author('bob@mochimedia.com'). - --export([new_request/1, new_response/1]). --export([all_loaded/0, all_loaded/1, reload/0]). --export([ensure_started/1]). - -reload() -> - [c:l(Module) || Module <- all_loaded()]. - -all_loaded() -> - all_loaded(filename:dirname(code:which(?MODULE))). 
- -all_loaded(Base) when is_atom(Base) -> - []; -all_loaded(Base) -> - FullBase = Base ++ "/", - F = fun ({_Module, Loaded}, Acc) when is_atom(Loaded) -> - Acc; - ({Module, Loaded}, Acc) -> - case lists:prefix(FullBase, Loaded) of - true -> - [Module | Acc]; - false -> - Acc - end - end, - lists:foldl(F, [], code:all_loaded()). - - -%% @spec new_request({Socket, Request, Headers}) -> MochiWebRequest -%% @doc Return a mochiweb_request data structure. -new_request({Socket, {Method, {abs_path, Uri}, Version}, Headers}) -> - mochiweb_request:new(Socket, - Method, - Uri, - Version, - mochiweb_headers:make(Headers)); -% this case probably doesn't "exist". -new_request({Socket, {Method, {absoluteURI, _Protocol, _Host, _Port, Uri}, - Version}, Headers}) -> - mochiweb_request:new(Socket, - Method, - Uri, - Version, - mochiweb_headers:make(Headers)); -%% Request-URI is "*" -%% From http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2 -new_request({Socket, {Method, '*'=Uri, Version}, Headers}) -> - mochiweb_request:new(Socket, - Method, - Uri, - Version, - mochiweb_headers:make(Headers)). - -%% @spec new_response({Request, integer(), Headers}) -> MochiWebResponse -%% @doc Return a mochiweb_response data structure. -new_response({Request, Code, Headers}) -> - mochiweb_response:new(Request, - Code, - mochiweb_headers:make(Headers)). - -%% @spec ensure_started(App::atom()) -> ok -%% @doc Start the given App if it has not been started already. -ensure_started(App) -> - case application:start(App) of - ok -> - ok; - {error, {already_started, App}} -> - ok - end. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_acceptor.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_acceptor.erl deleted file mode 100644 index ebbaf45..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_acceptor.erl +++ /dev/null @@ -1,50 +0,0 @@ -%% @author Bob Ippolito -%% @copyright 2010 Mochi Media, Inc. 
- -%% @doc MochiWeb acceptor. - --module(mochiweb_acceptor). --author('bob@mochimedia.com'). - --include("internal.hrl"). - --export([start_link/3, init/3]). - -start_link(Server, Listen, Loop) -> - proc_lib:spawn_link(?MODULE, init, [Server, Listen, Loop]). - -init(Server, Listen, Loop) -> - T1 = os:timestamp(), - case catch mochiweb_socket:accept(Listen) of - {ok, Socket} -> - gen_server:cast(Server, {accepted, self(), timer:now_diff(os:timestamp(), T1)}), - call_loop(Loop, Socket); - {error, closed} -> - exit(normal); - {error, timeout} -> - init(Server, Listen, Loop); - {error, esslaccept} -> - exit(normal); - Other -> - error_logger:error_report( - [{application, mochiweb}, - "Accept failed error", - lists:flatten(io_lib:format("~p", [Other]))]), - exit({error, accept_failed}) - end. - -call_loop({M, F}, Socket) -> - M:F(Socket); -call_loop({M, F, [A1]}, Socket) -> - M:F(Socket, A1); -call_loop({M, F, A}, Socket) -> - erlang:apply(M, F, [Socket | A]); -call_loop(Loop, Socket) -> - Loop(Socket). - -%% -%% Tests -%% --ifdef(TEST). --include_lib("eunit/include/eunit.hrl"). --endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_echo.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_echo.erl deleted file mode 100644 index e145840..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_echo.erl +++ /dev/null @@ -1,41 +0,0 @@ -%% @author Bob Ippolito -%% @copyright 2007 Mochi Media, Inc. - -%% @doc Simple and stupid echo server to demo mochiweb_socket_server. - --module(mochiweb_echo). --author('bob@mochimedia.com'). --export([start/0, stop/0, loop/1]). - -stop() -> - mochiweb_socket_server:stop(?MODULE). - -start() -> - mochiweb_socket_server:start([{link, false} | options()]). - -options() -> - [{name, ?MODULE}, - {port, 6789}, - {ip, "127.0.0.1"}, - {max, 1}, - {loop, {?MODULE, loop}}]. 
- -loop(Socket) -> - case mochiweb_socket:recv(Socket, 0, 30000) of - {ok, Data} -> - case mochiweb_socket:send(Socket, Data) of - ok -> - loop(Socket); - _ -> - exit(normal) - end; - _Other -> - exit(normal) - end. - -%% -%% Tests -%% --ifdef(TEST). --include_lib("eunit/include/eunit.hrl"). --endif. diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket.erl deleted file mode 100644 index 76b018c..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/src/mochiweb_socket.erl +++ /dev/null @@ -1,84 +0,0 @@ -%% @copyright 2010 Mochi Media, Inc. - -%% @doc MochiWeb socket - wrapper for plain and ssl sockets. - --module(mochiweb_socket). - --export([listen/4, accept/1, recv/3, send/2, close/1, port/1, peername/1, - setopts/2, type/1]). - --define(ACCEPT_TIMEOUT, 2000). - -listen(Ssl, Port, Opts, SslOpts) -> - case Ssl of - true -> - case ssl:listen(Port, Opts ++ SslOpts) of - {ok, ListenSocket} -> - {ok, {ssl, ListenSocket}}; - {error, _} = Err -> - Err - end; - false -> - gen_tcp:listen(Port, Opts) - end. - -accept({ssl, ListenSocket}) -> - % There's a bug in ssl:transport_accept/2 at the moment, which is the - % reason for the try...catch block. Should be fixed in OTP R14. - try ssl:transport_accept(ListenSocket) of - {ok, Socket} -> - case ssl:ssl_accept(Socket) of - ok -> - {ok, {ssl, Socket}}; - {error, _} = Err -> - Err - end; - {error, _} = Err -> - Err - catch - error:{badmatch, {error, Reason}} -> - {error, Reason} - end; -accept(ListenSocket) -> - gen_tcp:accept(ListenSocket, ?ACCEPT_TIMEOUT). - -recv({ssl, Socket}, Length, Timeout) -> - ssl:recv(Socket, Length, Timeout); -recv(Socket, Length, Timeout) -> - gen_tcp:recv(Socket, Length, Timeout). - -send({ssl, Socket}, Data) -> - ssl:send(Socket, Data); -send(Socket, Data) -> - gen_tcp:send(Socket, Data). 
- -close({ssl, Socket}) -> - ssl:close(Socket); -close(Socket) -> - gen_tcp:close(Socket). - -port({ssl, Socket}) -> - case ssl:sockname(Socket) of - {ok, {_, Port}} -> - {ok, Port}; - {error, _} = Err -> - Err - end; -port(Socket) -> - inet:port(Socket). - -peername({ssl, Socket}) -> - ssl:peername(Socket); -peername(Socket) -> - inet:peername(Socket). - -setopts({ssl, Socket}, Opts) -> - ssl:setopts(Socket, Opts); -setopts(Socket, Opts) -> - inet:setopts(Socket, Opts). - -type({ssl, _}) -> - ssl; -type(_) -> - plain. - diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/start-dev.sh b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/start-dev.sh deleted file mode 100755 index fb7c45e..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/support/templates/mochiwebapp_skel/start-dev.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -# NOTE: mustache templates need \\ because they are not awesome. -exec erl -pa ebin edit deps/*/ebin -boot start_sasl \\ - -sname {{appid}}_dev \\ - -s {{appid}} \\ - -s reloader diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_tests.erl b/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_tests.erl deleted file mode 100644 index 15cb06a..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/mochiweb-git/test/mochiweb_tests.erl +++ /dev/null @@ -1,199 +0,0 @@ --module(mochiweb_tests). --include_lib("eunit/include/eunit.hrl"). - --record(treq, {path, body= <<>>, xreply= <<>>}). - -ssl_cert_opts() -> - EbinDir = filename:dirname(code:which(?MODULE)), - CertDir = filename:join([EbinDir, "..", "support", "test-materials"]), - CertFile = filename:join(CertDir, "test_ssl_cert.pem"), - KeyFile = filename:join(CertDir, "test_ssl_key.pem"), - [{certfile, CertFile}, {keyfile, KeyFile}]. 
- -with_server(Transport, ServerFun, ClientFun) -> - ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}], - ServerOpts = case Transport of - plain -> - ServerOpts0; - ssl -> - ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}] - end, - {ok, Server} = mochiweb_http:start_link(ServerOpts), - Port = mochiweb_socket_server:get(Server, port), - Res = (catch ClientFun(Transport, Port)), - mochiweb_http:stop(Server), - Res. - -request_test() -> - R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []), - "/foo/bar/baz wibble quux" = R:get(path), - ok. - --define(LARGE_TIMEOUT, 60). - -single_http_GET_test() -> - do_GET(plain, 1). - -single_https_GET_test() -> - do_GET(ssl, 1). - -multiple_http_GET_test() -> - do_GET(plain, 3). - -multiple_https_GET_test() -> - do_GET(ssl, 3). - -hundred_http_GET_test_() -> % note the underscore - {timeout, ?LARGE_TIMEOUT, - fun() -> ?assertEqual(ok, do_GET(plain,100)) end}. - -hundred_https_GET_test_() -> % note the underscore - {timeout, ?LARGE_TIMEOUT, - fun() -> ?assertEqual(ok, do_GET(ssl,100)) end}. - -single_128_http_POST_test() -> - do_POST(plain, 128, 1). - -single_128_https_POST_test() -> - do_POST(ssl, 128, 1). - -single_2k_http_POST_test() -> - do_POST(plain, 2048, 1). - -single_2k_https_POST_test() -> - do_POST(ssl, 2048, 1). - -single_100k_http_POST_test() -> - do_POST(plain, 102400, 1). - -single_100k_https_POST_test() -> - do_POST(ssl, 102400, 1). - -multiple_100k_http_POST_test() -> - do_POST(plain, 102400, 3). - -multiple_100K_https_POST_test() -> - do_POST(ssl, 102400, 3). - -hundred_128_http_POST_test_() -> % note the underscore - {timeout, ?LARGE_TIMEOUT, - fun() -> ?assertEqual(ok, do_POST(plain, 128, 100)) end}. - -hundred_128_https_POST_test_() -> % note the underscore - {timeout, ?LARGE_TIMEOUT, - fun() -> ?assertEqual(ok, do_POST(ssl, 128, 100)) end}. 
- -do_GET(Transport, Times) -> - PathPrefix = "/whatever/", - ReplyPrefix = "You requested: ", - ServerFun = fun (Req) -> - Reply = ReplyPrefix ++ Req:get(path), - Req:ok({"text/plain", Reply}) - end, - TestReqs = [begin - Path = PathPrefix ++ integer_to_list(N), - ExpectedReply = list_to_binary(ReplyPrefix ++ Path), - #treq{path=Path, xreply=ExpectedReply} - end || N <- lists:seq(1, Times)], - ClientFun = new_client_fun('GET', TestReqs), - ok = with_server(Transport, ServerFun, ClientFun), - ok. - -do_POST(Transport, Size, Times) -> - ServerFun = fun (Req) -> - Body = Req:recv_body(), - Headers = [{"Content-Type", "application/octet-stream"}], - Req:respond({201, Headers, Body}) - end, - TestReqs = [begin - Path = "/stuff/" ++ integer_to_list(N), - Body = crypto:rand_bytes(Size), - #treq{path=Path, body=Body, xreply=Body} - end || N <- lists:seq(1, Times)], - ClientFun = new_client_fun('POST', TestReqs), - ok = with_server(Transport, ServerFun, ClientFun), - ok. - -new_client_fun(Method, TestReqs) -> - fun (Transport, Port) -> - client_request(Transport, Port, Method, TestReqs) - end. - -client_request(Transport, Port, Method, TestReqs) -> - Opts = [binary, {active, false}, {packet, http}], - SockFun = case Transport of - plain -> - {ok, Socket} = gen_tcp:connect("127.0.0.1", Port, Opts), - fun (recv) -> - gen_tcp:recv(Socket, 0); - ({recv, Length}) -> - gen_tcp:recv(Socket, Length); - ({send, Data}) -> - gen_tcp:send(Socket, Data); - ({setopts, L}) -> - inet:setopts(Socket, L) - end; - ssl -> - {ok, Socket} = ssl:connect("127.0.0.1", Port, [{ssl_imp, new} | Opts]), - fun (recv) -> - ssl:recv(Socket, 0); - ({recv, Length}) -> - ssl:recv(Socket, Length); - ({send, Data}) -> - ssl:send(Socket, Data); - ({setopts, L}) -> - ssl:setopts(Socket, L) - end - end, - client_request(SockFun, Method, TestReqs). 
- -client_request(SockFun, _Method, []) -> - {the_end, {error, closed}} = {the_end, SockFun(recv)}, - ok; -client_request(SockFun, Method, - [#treq{path=Path, body=Body, xreply=ExReply} | Rest]) -> - Request = [atom_to_list(Method), " ", Path, " HTTP/1.1\r\n", - client_headers(Body, Rest =:= []), - "\r\n", - Body], - ok = SockFun({send, Request}), - case Method of - 'GET' -> - {ok, {http_response, {1,1}, 200, "OK"}} = SockFun(recv); - 'POST' -> - {ok, {http_response, {1,1}, 201, "Created"}} = SockFun(recv) - end, - ok = SockFun({setopts, [{packet, httph}]}), - {ok, {http_header, _, 'Server', _, "MochiWeb" ++ _}} = SockFun(recv), - {ok, {http_header, _, 'Date', _, _}} = SockFun(recv), - {ok, {http_header, _, 'Content-Type', _, _}} = SockFun(recv), - {ok, {http_header, _, 'Content-Length', _, ConLenStr}} = SockFun(recv), - ContentLength = list_to_integer(ConLenStr), - {ok, http_eoh} = SockFun(recv), - ok = SockFun({setopts, [{packet, raw}]}), - {payload, ExReply} = {payload, drain_reply(SockFun, ContentLength, <<>>)}, - ok = SockFun({setopts, [{packet, http}]}), - client_request(SockFun, Method, Rest). - -client_headers(Body, IsLastRequest) -> - ["Host: localhost\r\n", - case Body of - <<>> -> - ""; - _ -> - ["Content-Type: application/octet-stream\r\n", - "Content-Length: ", integer_to_list(byte_size(Body)), "\r\n"] - end, - case IsLastRequest of - true -> - "Connection: close\r\n"; - false -> - "" - end]. - -drain_reply(_SockFun, 0, Acc) -> - Acc; -drain_reply(SockFun, Length, Acc) -> - Sz = erlang:min(Length, 1024), - {ok, B} = SockFun({recv, Sz}), - drain_reply(SockFun, Length - Sz, <>). 
diff --git a/rabbitmq-server/plugins-src/mochiweb-wrapper/package.mk b/rabbitmq-server/plugins-src/mochiweb-wrapper/package.mk deleted file mode 100644 index dbbe133..0000000 --- a/rabbitmq-server/plugins-src/mochiweb-wrapper/package.mk +++ /dev/null @@ -1,40 +0,0 @@ -APP_NAME:=mochiweb - -UPSTREAM_GIT:=https://github.com/rabbitmq/mochiweb.git -UPSTREAM_REVISION:=680dba8a8a0dd8ee18d03bf814cfb2340bf3bbff -RETAIN_ORIGINAL_VERSION:=true -WRAPPER_PATCHES:=10-build-on-R12B-5.patch \ - 20-MAX_RECV_BODY.patch \ - 30-remove-crypto-ssl-dependencies.patch \ - 40-remove-compiler-syntax_tools-dependencies.patch \ - 50-remove-json.patch - -# internal.hrl is used by webmachine -UPSTREAM_INCLUDE_DIRS+=$(CLONE_DIR)/src - -ORIGINAL_APP_FILE:=$(CLONE_DIR)/$(APP_NAME).app -DO_NOT_GENERATE_APP_FILE=true - -define package_rules - -$(CLONE_DIR)/src/$(APP_NAME).app.src: $(CLONE_DIR)/.done - -$(ORIGINAL_APP_FILE): $(CLONE_DIR)/src/$(APP_NAME).app.src - cp $(CLONE_DIR)/src/$(APP_NAME).app.src $(ORIGINAL_APP_FILE) - -$(PACKAGE_DIR)+clean:: - rm -rf $(ORIGINAL_APP_FILE) - -# This rule is run *before* the one in do_package.mk -$(PLUGINS_SRC_DIST_DIR)/$(PACKAGE_DIR)/.srcdist_done:: - cp $(CLONE_DIR)/LICENSE $(PACKAGE_DIR)/LICENSE-MIT-Mochi - -$(CLONE_DIR)/ebin/mochifmt_records.beam: $(CLONE_DIR)/ebin/pmod_pt.beam - -$(CLONE_DIR)/ebin/mochifmt_std.beam: $(CLONE_DIR)/ebin/pmod_pt.beam - -$(CLONE_DIR)/ebin/mochifmt_request.beam: $(CLONE_DIR)/ebin/pmod_pt.beam - -$(CLONE_DIR)/ebin/mochifmt_response.beam: $(CLONE_DIR)/ebin/pmod_pt.beam - -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/Makefile b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/Makefile +++ /dev/null @@ -1 +0,0 @@ -include 
../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/package.mk b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/package.mk deleted file mode 100644 index 0a1b2ea..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/package.mk +++ /dev/null @@ -1,26 +0,0 @@ -RELEASABLE:=true -APP_NAME=rabbitmq_amqp1_0 -DEPS:=rabbitmq-server rabbitmq-erlang-client -STANDALONE_TEST_COMMANDS:=eunit:test(rabbit_amqp1_0_test,[verbose]) -WITH_BROKER_TEST_SCRIPTS:=$(PACKAGE_DIR)/test/swiftmq/run-tests.sh - -FRAMING_HRL=$(PACKAGE_DIR)/include/rabbit_amqp1_0_framing.hrl -FRAMING_ERL=$(PACKAGE_DIR)/src/rabbit_amqp1_0_framing0.erl -CODEGEN=$(PACKAGE_DIR)/codegen.py -CODEGEN_SPECS=$(PACKAGE_DIR)/spec/messaging.xml $(PACKAGE_DIR)/spec/security.xml $(PACKAGE_DIR)/spec/transport.xml $(PACKAGE_DIR)/spec/transactions.xml - -INCLUDE_HRLS+=$(FRAMING_HRL) -SOURCE_ERLS+=$(FRAMING_ERL) - -define package_rules - -$(FRAMING_ERL): $(CODEGEN) $(CODEGEN_SPECS) - $(CODEGEN) erl $(CODEGEN_SPECS) > $$@ - -$(FRAMING_HRL): $(CODEGEN) $(CODEGEN_SPECS) - $(CODEGEN) hrl $(CODEGEN_SPECS) > $$@ - -$(PACKAGE_DIR)+clean:: - rm -f $(FRAMING_HRL) $(FRAMING_ERL) - -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/run-tests.sh b/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/run-tests.sh deleted file mode 100755 index 70fab43..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-amqp1.0/test/swiftmq/run-tests.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -e -make -C $(dirname $0) test diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.travis.yml b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.travis.yml deleted file mode 100644 index 6b022a8..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/.travis.yml +++ /dev/null @@ 
-1,37 +0,0 @@ -sudo: true -language: erlang -notifications: - email: - - alerts@rabbitmq.com -addons: - apt: - packages: - - slapd - - ldap-utils - - xsltproc -otp_release: - - "R16B03-1" - - "17.5" - - "18.0" -install: - - if [ ! -d "$HOME/rabbitmq-public-umbrella/.git" ]; then git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git $HOME/rabbitmq-public-umbrella; fi - - cd $HOME/rabbitmq-public-umbrella - - make co - - make up -services: - - slapd -before_script: - - IFS="/" read -a PARTS <<< "$TRAVIS_REPO_SLUG" - - export TEST_DIR=$HOME/rabbitmq-public-umbrella/${PARTS[1]} - - rm -rf ${TEST_DIR} - - cp -r ${TRAVIS_BUILD_DIR} ${TEST_DIR} - - cd ${TEST_DIR} - - ./example/setup.sh -script: make test -before_cache: - - rm -rf ${TEST_DIR} - - cd $HOME -cache: - apt: true - directories: - - $HOME/rabbitmq-public-umbrella diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/Makefile b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README deleted file mode 100644 index 278cda8..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README +++ /dev/null @@ -1,20 +0,0 @@ -Requirements -============ - -You can build and install it like any other plugin (see -http://www.rabbitmq.com/plugin-development.html). - -Documentation -============= - -See http://www.rabbitmq.com/ldap.html - -Limitations -=========== - -Currently this plugin is rather chatty with LDAP connections when -doing authorisation over LDAP - every time RabbitMQ needs to do an -authorisation query it starts a new LDAP connection. However, RabbitMQ -does have a per-channel authorisation cache, so this is not too awful. 
- -There might need to be more types of queries. diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-authorisation b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-authorisation deleted file mode 100644 index 6e0abe0..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/README-authorisation +++ /dev/null @@ -1 +0,0 @@ -See http://www.rabbitmq.com/ldap.html diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/groups.ldif b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/groups.ldif deleted file mode 100644 index 5a5c8d0..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/example/groups.ldif +++ /dev/null @@ -1,8 +0,0 @@ -dn: ou=groups,dc=example,dc=com -objectclass:organizationalunit -ou: groups - -dn: cn=wheel,ou=groups,dc=example,dc=com -objectclass: groupOfNames -cn: wheel -member: cn=Simon MacMullen,ou=people,dc=example,dc=com diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/package.mk b/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/package.mk deleted file mode 100644 index 02c22ee..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-backend-ldap/package.mk +++ /dev/null @@ -1,9 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-server rabbitmq-erlang-client eldap-wrapper - -ifeq ($(shell nc -z localhost 389 && echo true),true) -WITH_BROKER_TEST_COMMANDS:=eunit:test([rabbit_auth_backend_ldap_unit_test,rabbit_auth_backend_ldap_test],[verbose]) -WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/etc/rabbit-test -else -$(warning Not running LDAP tests; no LDAP server found on localhost) -endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/Makefile b/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/Makefile deleted 
file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/package.mk b/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/package.mk deleted file mode 100644 index baa4c03..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-auth-mechanism-ssl/package.mk +++ /dev/null @@ -1,2 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-server rabbitmq-erlang-client diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.travis.yml b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.travis.yml deleted file mode 100644 index 09fbd63..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/.travis.yml +++ /dev/null @@ -1,32 +0,0 @@ -sudo: false -language: erlang -addons: - apt: - packages: - - xsltproc -otp_release: - - R16B03-1 - - 17.5 - - 18.0 -install: - - if [ ! 
-d "$HOME/rabbitmq-public-umbrella/.git" ]; then git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git $HOME/rabbitmq-public-umbrella; fi - - cd $HOME/rabbitmq-public-umbrella - - make co - - make up -before_script: - - IFS="/" read -a PARTS <<< "$TRAVIS_REPO_SLUG" - - export TEST_DIR=$HOME/rabbitmq-public-umbrella/${PARTS[1]} - - rm -rf ${TEST_DIR} - - cp -r ${TRAVIS_BUILD_DIR} ${TEST_DIR} - - cd ${TEST_DIR} -script: make test -before_cache: - - rm -rf ${TEST_DIR} - - cd $HOME -cache: - apt: true - directories: - - $HOME/rabbitmq-public-umbrella -notifications: - email: - - alerts@rabbitmq.com diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/Makefile b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/package.mk b/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/package.mk deleted file mode 100644 index 151c43c..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-consistent-hash-exchange/package.mk +++ /dev/null @@ -1,3 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-server rabbitmq-erlang-client -WITH_BROKER_TEST_COMMANDS:=rabbit_exchange_type_consistent_hash_test:test() diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile deleted file mode 100644 index a42c666..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile +++ /dev/null @@ -1,125 +0,0 @@ -# The contents of this file are subject to the Mozilla Public License -# Version 1.1 (the "License"); you may not 
use this file except in -# compliance with the License. You may obtain a copy of the License at -# http://www.mozilla.org/MPL/ -# -# Software distributed under the License is distributed on an "AS IS" -# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the -# License for the specific language governing rights and limitations -# under the License. -# -# The Original Code is RabbitMQ. -# -# The Initial Developer of the Original Code is GoPivotal, Inc. -# Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. -# - -VERSION=0.0.0 - -SOURCE_PACKAGE_DIR=$(PACKAGE)-$(VERSION)-src -SOURCE_PACKAGE_TAR_GZ=$(SOURCE_PACKAGE_DIR).tar.gz - -BROKER_HEADERS=$(wildcard $(BROKER_DIR)/$(INCLUDE_DIR)/*.hrl) -BROKER_SOURCES=$(wildcard $(BROKER_DIR)/$(SOURCE_DIR)/*.erl) -BROKER_DEPS=$(BROKER_HEADERS) $(BROKER_SOURCES) - -INFILES=$(shell find . -name '*.app.in') -INTARGETS=$(patsubst %.in, %, $(INFILES)) - -WEB_URL=http://www.rabbitmq.com/ - -include common.mk - -run_in_broker: compile $(BROKER_DEPS) $(EBIN_DIR)/$(PACKAGE).app - $(MAKE) RABBITMQ_SERVER_START_ARGS='$(PA_LOAD_PATH)' -C $(BROKER_DIR) run - -clean: common_clean - rm -f $(INTARGETS) - rm -rf $(DIST_DIR) - -distribution: documentation source_tarball package - -%.app: %.app.in $(SOURCES) $(BROKER_DIR)/generate_app - escript $(BROKER_DIR)/generate_app $< $@ $(SOURCE_DIR) - sed 's/%%VSN%%/$(VERSION)/' $@ > $@.tmp && mv $@.tmp $@ - -############################################################################### -## Dialyzer -############################################################################### - -RABBIT_PLT=$(BROKER_DIR)/rabbit.plt - -dialyze: $(RABBIT_PLT) $(TARGETS) - dialyzer --plt $(RABBIT_PLT) --no_native -Wrace_conditions $(TARGETS) - -.PHONY: $(RABBIT_PLT) -$(RABBIT_PLT): - $(MAKE) -C $(BROKER_DIR) create-plt - -############################################################################### -## Documentation 
-############################################################################### - -documentation: $(DOC_DIR)/index.html - -$(DOC_DIR)/overview.edoc: $(SOURCE_DIR)/overview.edoc.in - mkdir -p $(DOC_DIR) - sed -e 's:%%VERSION%%:$(VERSION):g' < $< > $@ - -$(DOC_DIR)/index.html: $(DEPS_DIR)/$(COMMON_PACKAGE_DIR) $(DOC_DIR)/overview.edoc $(SOURCES) - $(LIBS_PATH) erl -noshell -eval 'edoc:application(amqp_client, ".", [{preprocess, true}, {macros, [{edoc, true}]}])' -run init stop - -############################################################################### -## Testing -############################################################################### - -include test.mk - -compile_tests: $(TEST_TARGETS) $(EBIN_DIR)/$(PACKAGE).app - -$(TEST_TARGETS): $(TEST_DIR) - -.PHONY: $(TEST_DIR) -$(TEST_DIR): $(DEPS_DIR)/$(COMMON_PACKAGE_DIR) - $(MAKE) -C $(TEST_DIR) - -############################################################################### -## Packaging -############################################################################### - -COPY=cp -pR - -$(DIST_DIR)/$(COMMON_PACKAGE_EZ): $(BROKER_DEPS) $(COMMON_PACKAGE).app | $(DIST_DIR) - rm -f $@ - $(MAKE) -C $(BROKER_DIR) - rm -rf $(DIST_DIR)/$(COMMON_PACKAGE_DIR) - mkdir -p $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(INCLUDE_DIR) - mkdir -p $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(EBIN_DIR) - cp $(COMMON_PACKAGE).app $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(EBIN_DIR)/ - $(foreach DEP, $(DEPS), \ - ( cp $(BROKER_DIR)/ebin/$(DEP).beam $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(EBIN_DIR)/ \ - );) - cp $(BROKER_DIR)/include/*.hrl $(DIST_DIR)/$(COMMON_PACKAGE_DIR)/$(INCLUDE_DIR)/ - (cd $(DIST_DIR); zip -q -r $(COMMON_PACKAGE_EZ) $(COMMON_PACKAGE_DIR)) - -source_tarball: $(DIST_DIR)/$(COMMON_PACKAGE_EZ) $(EBIN_DIR)/$(PACKAGE).app | $(DIST_DIR) - mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(DIST_DIR) - $(COPY) $(DIST_DIR)/$(COMMON_PACKAGE_EZ) $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(DIST_DIR)/ - $(COPY) README.in $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/README - 
elinks -dump -no-references -no-numbering $(WEB_URL)build-erlang-client.html >> $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/README - $(COPY) common.mk $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/ - $(COPY) test.mk $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/ - sed 's/%%VSN%%/$(VERSION)/' Makefile.in > $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/Makefile - mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(SOURCE_DIR) - $(COPY) $(SOURCE_DIR)/*.erl $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(SOURCE_DIR)/ - mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(EBIN_DIR) - $(COPY) $(EBIN_DIR)/*.app $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(EBIN_DIR)/ - mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(INCLUDE_DIR) - $(COPY) $(INCLUDE_DIR)/*.hrl $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(INCLUDE_DIR)/ - mkdir -p $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(TEST_DIR) - $(COPY) $(TEST_DIR)/*.erl $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(TEST_DIR)/ - $(COPY) $(TEST_DIR)/Makefile $(DIST_DIR)/$(SOURCE_PACKAGE_DIR)/$(TEST_DIR)/ - cd $(DIST_DIR) ; tar czf $(SOURCE_PACKAGE_TAR_GZ) $(SOURCE_PACKAGE_DIR) - -$(DIST_DIR): - mkdir -p $@ diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile.in b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile.in deleted file mode 100644 index 0b46f9f..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/Makefile.in +++ /dev/null @@ -1,26 +0,0 @@ -# The contents of this file are subject to the Mozilla Public License -# Version 1.1 (the "License"); you may not use this file except in -# compliance with the License. You may obtain a copy of the License at -# http://www.mozilla.org/MPL/ -# -# Software distributed under the License is distributed on an "AS IS" -# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the -# License for the specific language governing rights and limitations -# under the License. -# -# The Original Code is RabbitMQ. -# -# The Initial Developer of the Original Code is GoPivotal, Inc. -# Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. 
-# - -VERSION=%%VSN%% - -include common.mk -include test.mk - -clean: common_clean - -compile_tests: - $(MAKE) -C test VERSION=$(VERSION) - diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/common.mk b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/common.mk deleted file mode 100644 index 9de9221..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/common.mk +++ /dev/null @@ -1,201 +0,0 @@ -# The contents of this file are subject to the Mozilla Public License -# Version 1.1 (the "License"); you may not use this file except in -# compliance with the License. You may obtain a copy of the License at -# http://www.mozilla.org/MPL/ -# -# Software distributed under the License is distributed on an "AS IS" -# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the -# License for the specific language governing rights and limitations -# under the License. -# -# The Original Code is RabbitMQ. -# -# The Initial Developer of the Original Code is GoPivotal, Inc. -# Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. -# - -# The client library can either be built from source control or by downloading -# a source tarball from the RabbitMQ site. The intention behind the source tarball is -# to be able to unpack this anywhere and just run a simple a test, under the -# assumption that you have a running broker. This provides the simplest -# possible way of building and running the client. -# -# The source control version, on the other hand, contains far more infrastructure -# to start and stop brokers, package modules from the server, run embedded tests -# and so forth. -# -# This means that the Makefile of the source control version contains a lot of -# functionality that just wouldn't work with the source tarball version. -# -# The purpose of this common Makefile is to define as many commonalities -# between the build requirements of the source control version and the source -# tarball version. 
This avoids duplicating make definitions and rules and -# helps keep the Makefile maintenence well factored. - -ifndef TMPDIR -TMPDIR := /tmp -endif - -EBIN_DIR=ebin -BROKER_DIR=../rabbitmq-server -export INCLUDE_DIR=include -TEST_DIR=test -SOURCE_DIR=src -DIST_DIR=dist -DEPS_DIR=deps -DOC_DIR=doc -DEPS_FILE=deps.mk - -ifeq ("$(ERL_LIBS)", "") - ERL_LIBS := -else - ERL_LIBS := :$(ERL_LIBS) -endif - -ERL_PATH ?= - -PACKAGE=amqp_client -PACKAGE_DIR=$(PACKAGE)-$(VERSION) -PACKAGE_NAME_EZ=$(PACKAGE_DIR).ez -COMMON_PACKAGE=rabbit_common -export COMMON_PACKAGE_DIR=$(COMMON_PACKAGE)-$(VERSION) -COMMON_PACKAGE_EZ=$(COMMON_PACKAGE_DIR).ez -NODE_NAME=amqp_client - -DEPS=$(shell erl -noshell -eval '{ok,[{_,_,[_,_,{modules, Mods},_,_,_]}]} = \ - file:consult("$(COMMON_PACKAGE).app.in"), \ - [io:format("~p ",[M]) || M <- Mods], halt().') - -INCLUDES=$(wildcard $(INCLUDE_DIR)/*.hrl) -SOURCES=$(wildcard $(SOURCE_DIR)/*.erl) -TARGETS=$(patsubst $(SOURCE_DIR)/%.erl, $(EBIN_DIR)/%.beam, $(SOURCES)) -TEST_SOURCES=$(wildcard $(TEST_DIR)/*.erl) -TEST_TARGETS=$(patsubst $(TEST_DIR)/%.erl, $(TEST_DIR)/%.beam, $(TEST_SOURCES)) - -LIBS_PATH_UNIX=$(DEPS_DIR):$(DIST_DIR)$(ERL_LIBS) -IS_CYGWIN=$(shell if [ $(shell expr "$(shell uname -s)" : 'CYGWIN_NT') -gt 0 ]; then echo "true"; else echo "false"; fi) -ifeq ($(IS_CYGWIN),true) - LIBS_PATH=ERL_LIBS="$(shell cygpath -wp $(LIBS_PATH_UNIX))" -else - LIBS_PATH=ERL_LIBS=$(LIBS_PATH_UNIX) -endif - -LOAD_PATH=$(EBIN_DIR) $(TEST_DIR) $(ERL_PATH) - -RUN:=$(LIBS_PATH) erl -pa $(LOAD_PATH) -sname $(NODE_NAME) - -MKTEMP=$$(mktemp $(TMPDIR)/tmp.XXXXXXXXXX) - -ifndef USE_SPECS -# our type specs rely on features / bug fixes in dialyzer that are -# only available in R13B01 upwards (R13B is eshell 5.7.2) -# -# NB: do not mark this variable for export, otherwise it will -# override the test in rabbitmq-server's Makefile when it does the -# make -C, which causes problems whenever the test here and the test -# there compare system_info(version) against 
*different* eshell -# version numbers. -USE_SPECS:=$(shell erl -noshell -eval 'io:format([list_to_integer(X) || X <- string:tokens(erlang:system_info(version), ".")] >= [5,7,2]), halt().') -endif - -ERLC_OPTS=-I $(INCLUDE_DIR) -pa $(EBIN_DIR) -o $(EBIN_DIR) -Wall -v +debug_info $(if $(filter true,$(USE_SPECS)),-Duse_specs) - -RABBITMQ_NODENAME=rabbit -PA_LOAD_PATH=-pa $(realpath $(LOAD_PATH)) -RABBITMQCTL=$(BROKER_DIR)/scripts/rabbitmqctl - -ifdef SSL_CERTS_DIR -SSL := true -ALL_SSL := { $(MAKE) test_ssl || OK=false; } -ALL_SSL_COVERAGE := { $(MAKE) test_ssl_coverage || OK=false; } -SSL_BROKER_ARGS := -rabbit ssl_listeners [{\\\"0.0.0.0\\\",5671},{\\\"::1\\\",5671}] \ - -rabbit ssl_options [{cacertfile,\\\"$(SSL_CERTS_DIR)/testca/cacert.pem\\\"},{certfile,\\\"$(SSL_CERTS_DIR)/server/cert.pem\\\"},{keyfile,\\\"$(SSL_CERTS_DIR)/server/key.pem\\\"},{verify,verify_peer},{fail_if_no_peer_cert,true}] -SSL_CLIENT_ARGS := -erlang_client_ssl_dir $(SSL_CERTS_DIR) -else -SSL := @echo No SSL_CERTS_DIR defined. 
&& false -ALL_SSL := true -ALL_SSL_COVERAGE := true -SSL_BROKER_ARGS := -SSL_CLIENT_ARGS := -endif - -# Versions prior to this are not supported -NEED_MAKE := 3.80 -ifneq "$(NEED_MAKE)" "$(firstword $(sort $(NEED_MAKE) $(MAKE_VERSION)))" -$(error Versions of make prior to $(NEED_MAKE) are not supported) -endif - -# .DEFAULT_GOAL introduced in 3.81 -DEFAULT_GOAL_MAKE := 3.81 -ifneq "$(DEFAULT_GOAL_MAKE)" "$(firstword $(sort $(DEFAULT_GOAL_MAKE) $(MAKE_VERSION)))" -.DEFAULT_GOAL=all -endif - -all: package - -common_clean: - rm -f $(EBIN_DIR)/*.beam - rm -f erl_crash.dump - rm -rf $(DEPS_DIR) - rm -rf $(DOC_DIR) - rm -f $(DEPS_FILE) - $(MAKE) -C $(TEST_DIR) clean - -compile: $(TARGETS) $(EBIN_DIR)/$(PACKAGE).app - -run: compile - $(RUN) - -############################################################################### -## Packaging -############################################################################### - -$(DIST_DIR)/$(PACKAGE_NAME_EZ): $(TARGETS) $(EBIN_DIR)/$(PACKAGE).app | $(DIST_DIR) - rm -f $@ - rm -rf $(DIST_DIR)/$(PACKAGE_DIR) - mkdir -p $(DIST_DIR)/$(PACKAGE_DIR)/$(EBIN_DIR) - mkdir -p $(DIST_DIR)/$(PACKAGE_DIR)/$(INCLUDE_DIR) - cp -r $(EBIN_DIR)/*.beam $(DIST_DIR)/$(PACKAGE_DIR)/$(EBIN_DIR) - cp -r $(EBIN_DIR)/*.app $(DIST_DIR)/$(PACKAGE_DIR)/$(EBIN_DIR) - mkdir -p $(DIST_DIR)/$(PACKAGE_DIR)/$(INCLUDE_DIR) - cp -r $(INCLUDE_DIR)/* $(DIST_DIR)/$(PACKAGE_DIR)/$(INCLUDE_DIR) - (cd $(DIST_DIR); zip -q -r $(PACKAGE_NAME_EZ) $(PACKAGE_DIR)) - -package: $(DIST_DIR)/$(PACKAGE_NAME_EZ) - -############################################################################### -## Internal targets -############################################################################### - -$(DEPS_DIR)/$(COMMON_PACKAGE_DIR): $(DIST_DIR)/$(COMMON_PACKAGE_EZ) | $(DEPS_DIR) - rm -rf $(DEPS_DIR)/$(COMMON_PACKAGE_DIR) - mkdir -p $(DEPS_DIR)/$(COMMON_PACKAGE_DIR) - unzip -q -o $< -d $(DEPS_DIR) - -$(DEPS_FILE): $(SOURCES) $(INCLUDES) - rm -f $@ - echo $(subst : ,:,$(foreach 
FILE,$^,$(FILE):)) | escript $(BROKER_DIR)/generate_deps $@ $(EBIN_DIR) - -$(EBIN_DIR)/%.beam: $(SOURCE_DIR)/%.erl $(INCLUDES) $(DEPS_DIR)/$(COMMON_PACKAGE_DIR) | $(DEPS_FILE) - $(LIBS_PATH) erlc $(ERLC_OPTS) $< - -$(DEPS_DIR): - mkdir -p $@ - -# Note that all targets which depend on clean must have clean in their -# name. Also any target that doesn't depend on clean should not have -# clean in its name, unless you know that you don't need any of the -# automatic dependency generation for that target. - -# We want to load the dep file if *any* target *doesn't* contain -# "clean" - i.e. if removing all clean-like targets leaves something - -ifeq "$(MAKECMDGOALS)" "" -TESTABLEGOALS:=$(.DEFAULT_GOAL) -else -TESTABLEGOALS:=$(MAKECMDGOALS) -endif - -ifneq "$(strip $(patsubst clean%,,$(patsubst %clean,,$(TESTABLEGOALS))))" "" --include $(DEPS_FILE) -endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/rabbit_common.app.in b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/rabbit_common.app.in deleted file mode 100644 index 930e232..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/rabbit_common.app.in +++ /dev/null @@ -1,46 +0,0 @@ -{application, rabbit_common, - [{description, "RabbitMQ Common Libraries"}, - {vsn, "%%VSN%%"}, - {modules, [ - app_utils, - credit_flow, - pmon, - gen_server2, - mirrored_supervisor, - mochijson2, - priority_queue, - rabbit_backing_queue, - rabbit_basic, - rabbit_binary_generator, - rabbit_binary_parser, - rabbit_channel, - rabbit_channel_interceptor, - rabbit_runtime_parameter, - rabbit_command_assembler, - rabbit_exchange_type, - rabbit_exchange_decorator, - rabbit_authn_backend, - rabbit_authz_backend, - rabbit_auth_mechanism, - rabbit_framing_amqp_0_8, - rabbit_framing_amqp_0_9_1, - rabbit_heartbeat, - rabbit_misc, - rabbit_msg_store_index, - rabbit_net, - rabbit_networking, - rabbit_nodes, - rabbit_policy_validator, - rabbit_reader, - rabbit_writer, - rabbit_event, - rabbit_queue_collector, - 
rabbit_queue_decorator, - rabbit_amqqueue, - ssl_compat, - supervisor2, - time_compat - ]}, - {registered, []}, - {env, []}, - {applications, [kernel, stdlib]}]}. diff --git a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test.mk b/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test.mk deleted file mode 100644 index ae3057f..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-erlang-client/test.mk +++ /dev/null @@ -1,130 +0,0 @@ -# The contents of this file are subject to the Mozilla Public License -# Version 1.1 (the "License"); you may not use this file except in -# compliance with the License. You may obtain a copy of the License at -# http://www.mozilla.org/MPL/ -# -# Software distributed under the License is distributed on an "AS IS" -# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the -# License for the specific language governing rights and limitations -# under the License. -# -# The Original Code is RabbitMQ. -# -# The Initial Developer of the Original Code is GoPivotal, Inc. -# Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. -# - -IS_SUCCESS:=egrep "(All .+ tests (successful|passed).|Test passed.)" -TESTING_MESSAGE:=-eval "error_logger:tty(false), error_logger:logfile({open, \"$(TMPDIR)/erlang-client-tests.log\"}), io:format(\"~nTesting in progress. Please wait...~n~n\")." 
- -prepare_tests: compile compile_tests - -all_tests: prepare_tests - OK=true && \ - { $(MAKE) test_suites || OK=false; } && \ - { $(MAKE) test_common_package || OK=false; } && \ - { $(MAKE) test_direct || OK=false; } && \ - $$OK - -test_suites: prepare_tests - OK=true && \ - { $(MAKE) test_network || OK=false; } && \ - { $(MAKE) test_remote_direct || OK=false; } && \ - $(ALL_SSL) && \ - $$OK - -test_suites_coverage: prepare_tests - OK=true && \ - { $(MAKE) test_network_coverage || OK=false; } && \ - { $(MAKE) test_direct_coverage || OK=false; } && \ - $(ALL_SSL_COVERAGE) && \ - $$OK - -## Starts a broker, configures users and runs the tests on the same node -run_test_in_broker: - $(MAKE) start_test_broker_node - $(MAKE) unboot_broker - OK=true && \ - TMPFILE=$(MKTEMP) && echo "Redirecting output to $$TMPFILE" && \ - { $(MAKE) -C $(BROKER_DIR) run-node \ - RABBITMQ_SERVER_START_ARGS="$(PA_LOAD_PATH) $(SSL_BROKER_ARGS) \ - -noshell -s rabbit $(RUN_TEST_ARGS) -s init stop" 2>&1 | \ - tee $$TMPFILE || OK=false; } && \ - { $(IS_SUCCESS) $$TMPFILE || OK=false; } && \ - rm $$TMPFILE && \ - $(MAKE) boot_broker && \ - $(MAKE) stop_test_broker_node && \ - $$OK - -## Starts a broker, configures users and runs the tests from a different node -run_test_detached: start_test_broker_node - OK=true && \ - TMPFILE=$(MKTEMP) && echo "Redirecting output to $$TMPFILE" && \ - { $(RUN) -noinput $(TESTING_MESSAGE) \ - $(SSL_CLIENT_ARGS) $(RUN_TEST_ARGS) \ - -s init stop 2>&1 | tee $$TMPFILE || OK=false; } && \ - { $(IS_SUCCESS) $$TMPFILE || OK=false; } && \ - rm $$TMPFILE && \ - $(MAKE) stop_test_broker_node && \ - $$OK - -## Starts a broker, configures users and runs the tests from a different node -run_test_foreground: start_test_broker_node - OK=true && \ - { $(RUN) -noinput $(TESTING_MESSAGE) \ - $(SSL_CLIENT_ARGS) $(RUN_TEST_ARGS) \ - -s init stop || OK=false; } && \ - $(MAKE) stop_test_broker_node && \ - $$OK - -start_test_broker_node: boot_broker - sleep 1 - - $(RABBITMQCTL) 
delete_user test_user_no_perm - $(RABBITMQCTL) add_user test_user_no_perm test_user_no_perm - sleep 1 - -stop_test_broker_node: - sleep 1 - $(RABBITMQCTL) delete_user test_user_no_perm - $(MAKE) unboot_broker - -boot_broker: - $(MAKE) -C $(BROKER_DIR) start-background-node RABBITMQ_SERVER_START_ARGS="$(RABBITMQ_SERVER_START_ARGS) $(SSL_BROKER_ARGS)" - $(MAKE) -C $(BROKER_DIR) start-rabbit-on-node - -unboot_broker: - $(MAKE) -C $(BROKER_DIR) stop-rabbit-on-node - $(MAKE) -C $(BROKER_DIR) stop-node - -ssl: - $(SSL) - -test_ssl: prepare_tests ssl - $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="network_ssl" RUN_TEST_ARGS="-s amqp_client_SUITE test" - -test_network: prepare_tests - $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="network" RUN_TEST_ARGS="-s amqp_client_SUITE test" - -test_direct: prepare_tests - $(MAKE) run_test_in_broker AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test" - -test_remote_direct: prepare_tests - $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test" - -test_common_package: $(DIST_DIR)/$(COMMON_PACKAGE_EZ) package prepare_tests - $(MAKE) run_test_detached RUN="$(LIBS_PATH) erl -pa $(TEST_DIR)" \ - AMQP_CLIENT_TEST_CONNECTION_TYPE="network" RUN_TEST_ARGS="-s amqp_client_SUITE test" - $(MAKE) run_test_detached RUN="$(LIBS_PATH) erl -pa $(TEST_DIR) -sname amqp_client" \ - AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test" - -test_ssl_coverage: prepare_tests ssl - $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="network_ssl" RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage" - -test_network_coverage: prepare_tests - $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="network" RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage" - -test_remote_direct_coverage: prepare_tests - $(MAKE) run_test_detached AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE 
test_coverage" - -test_direct_coverage: prepare_tests - $(MAKE) run_test_in_broker AMQP_CLIENT_TEST_CONNECTION_TYPE="direct" RUN_TEST_ARGS="-s amqp_client_SUITE test_coverage" diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-federation-management/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/Makefile b/rabbitmq-server/plugins-src/rabbitmq-federation-management/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-federation-management/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/README b/rabbitmq-server/plugins-src/rabbitmq-federation-management/README deleted file mode 100644 index a80613d..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-federation-management/README +++ /dev/null @@ -1,8 +0,0 @@ -Adds information on federation link status to the management -plugin. Build it like any other plugin. - -If you have a heterogenous cluster (where the nodes have different -plugins installed), this should be installed on the same nodes as the -management plugin. - -The HTTP API is very simple: GET /api/federation-links. 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation-management/package.mk b/rabbitmq-server/plugins-src/rabbitmq-federation-management/package.mk deleted file mode 100644 index 2a0757c..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-federation-management/package.mk +++ /dev/null @@ -1,7 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-management - -CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f) -define construct_app_commands - cp -r $(PACKAGE_DIR)/priv $(APP_DIR) -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-federation/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/Makefile b/rabbitmq-server/plugins-src/rabbitmq-federation/Makefile deleted file mode 100644 index b8305a2..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/Makefile +++ /dev/null @@ -1,2 +0,0 @@ -CHAIN_TESTS=true -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/etc/setup-rabbit-test.sh b/rabbitmq-server/plugins-src/rabbitmq-federation/etc/setup-rabbit-test.sh deleted file mode 100755 index 9b2708a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/etc/setup-rabbit-test.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -e -sh -e `dirname $0`/rabbit-test.sh "`dirname $0`/../../rabbitmq-server/scripts/rabbitmqctl -n rabbit-test" diff --git a/rabbitmq-server/plugins-src/rabbitmq-federation/package.mk b/rabbitmq-server/plugins-src/rabbitmq-federation/package.mk deleted file mode 100644 index a4c040f..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-federation/package.mk +++ /dev/null @@ -1,15 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-erlang-client rabbitmq-test -FILTER:=all -COVER:=false -WITH_BROKER_TEST_COMMANDS:=rabbit_test_runner:run_in_broker(\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\") -WITH_BROKER_SETUP_SCRIPTS:=$(PACKAGE_DIR)/etc/setup-rabbit-test.sh 
-STANDALONE_TEST_COMMANDS:=rabbit_test_runner:run_multi(\"$(UMBRELLA_BASE_DIR)/rabbitmq-server\",\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\",$(COVER),\"/tmp/rabbitmq-multi-node/plugins\") - -# NB: we cannot use PACKAGE_DIR in the body of this rule as it gets -# expanded at the wrong time and set to the value of a completely -# arbitrary package! -$(PACKAGE_DIR)+pre-test:: $(PACKAGE_DIR)+dist - rm -rf /tmp/rabbitmq-multi-node/plugins - mkdir -p /tmp/rabbitmq-multi-node/plugins/plugins - cp -p $(UMBRELLA_BASE_DIR)/rabbitmq-federation/dist/*.ez /tmp/rabbitmq-multi-node/plugins/plugins diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-agent/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-management-agent/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-agent/Makefile b/rabbitmq-server/plugins-src/rabbitmq-management-agent/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management-agent/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-agent/package.mk b/rabbitmq-server/plugins-src/rabbitmq-management-agent/package.mk deleted file mode 100644 index 702019b..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management-agent/package.mk +++ /dev/null @@ -1 +0,0 @@ -DEPS:=rabbitmq-erlang-client diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/Makefile b/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git 
a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/package.mk b/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/package.mk deleted file mode 100644 index 2a0757c..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management-visualiser/package.mk +++ /dev/null @@ -1,7 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-management - -CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f) -define construct_app_commands - cp -r $(PACKAGE_DIR)/priv $(APP_DIR) -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-management/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/.travis.yml b/rabbitmq-server/plugins-src/rabbitmq-management/.travis.yml deleted file mode 100644 index 2d93510..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management/.travis.yml +++ /dev/null @@ -1,33 +0,0 @@ -sudo: false -language: erlang -notifications: - email: - - alerts@rabbitmq.com -addons: - apt: - packages: - - xsltproc - - python3 -otp_release: - - "R16B03-1" - - "17.5" - - "18.0" -install: - - if [ ! 
-d "$HOME/rabbitmq-public-umbrella/.git" ]; then git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git $HOME/rabbitmq-public-umbrella; fi - - cd $HOME/rabbitmq-public-umbrella - - make co - - make up -before_script: - - IFS="/" read -a PARTS <<< "$TRAVIS_REPO_SLUG" - - export TEST_DIR=$HOME/rabbitmq-public-umbrella/${PARTS[1]} - - rm -rf ${TEST_DIR} - - cp -r ${TRAVIS_BUILD_DIR} ${TEST_DIR} - - cd ${TEST_DIR} -script: make test -before_cache: - - rm -rf ${TEST_DIR} - - cd $HOME -cache: - apt: true - directories: - - $HOME/rabbitmq-public-umbrella diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/Makefile b/rabbitmq-server/plugins-src/rabbitmq-management/Makefile deleted file mode 100644 index 559ffc8..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management/Makefile +++ /dev/null @@ -1,28 +0,0 @@ -include ../umbrella.mk - -RABBITMQCTL=../rabbitmq-server/scripts/rabbitmqctl -TEST_TMPDIR=$(TMPDIR)/rabbitmq-test -OTHER_NODE=undefined -OTHER_PORT=undefined - -start-other-node: - rm -f $(TEST_TMPDIR)/rabbitmq-$(OTHER_NODE)-pid - RABBITMQ_MNESIA_BASE=$(TEST_TMPDIR)/rabbitmq-$(OTHER_NODE)-mnesia \ - RABBITMQ_PID_FILE=$(TEST_TMPDIR)/rabbitmq-$(OTHER_NODE)-pid \ - RABBITMQ_LOG_BASE=$(TEST_TMPDIR)/log \ - RABBITMQ_NODENAME=$(OTHER_NODE) \ - RABBITMQ_NODE_PORT=$(OTHER_PORT) \ - RABBITMQ_CONFIG_FILE=etc/$(OTHER_NODE) \ - RABBITMQ_PLUGINS_DIR=$(TEST_TMPDIR)/plugins \ - RABBITMQ_PLUGINS_EXPAND_DIR=$(TEST_TMPDIR)/$(OTHER_NODE)-plugins-expand \ - ../rabbitmq-server/scripts/rabbitmq-server >/tmp/$(OTHER_NODE).out 2>/tmp/$(OTHER_NODE).err & - $(RABBITMQCTL) -n $(OTHER_NODE) wait $(TEST_TMPDIR)/rabbitmq-$(OTHER_NODE)-pid - -cluster-other-node: - $(RABBITMQCTL) -n $(OTHER_NODE) stop_app - $(RABBITMQCTL) -n $(OTHER_NODE) reset - $(RABBITMQCTL) -n $(OTHER_NODE) join_cluster rabbit-test@`hostname -s` - $(RABBITMQCTL) -n $(OTHER_NODE) start_app - -stop-other-node: - $(RABBITMQCTL) -n $(OTHER_NODE) stop diff --git 
a/rabbitmq-server/plugins-src/rabbitmq-management/README b/rabbitmq-server/plugins-src/rabbitmq-management/README deleted file mode 100644 index 458a63c..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management/README +++ /dev/null @@ -1,12 +0,0 @@ -Generic build instructions are at: - http://www.rabbitmq.com/plugin-development.html - -When installed, point your broswer at: - -http://:15672/ - -and log in with AMQP credentials (guest/guest by default). - -Documentation for the HTTP API can be found at - -http://:15672/api/ diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/package.mk b/rabbitmq-server/plugins-src/rabbitmq-management/package.mk deleted file mode 100644 index 3d0817a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management/package.mk +++ /dev/null @@ -1,25 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-web-dispatch webmachine-wrapper rabbitmq-server rabbitmq-erlang-client rabbitmq-management-agent rabbitmq-test -FILTER:=all -COVER:=false -WITH_BROKER_TEST_COMMANDS:=rabbit_test_runner:run_in_broker(\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\") -WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/etc/rabbit-test -STANDALONE_TEST_COMMANDS:=rabbit_test_runner:run_multi(\"$(UMBRELLA_BASE_DIR)/rabbitmq-server\",\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\",$(COVER),\"/tmp/rabbitmq-multi-node/plugins\") -WITH_BROKER_TEST_SCRIPTS:=$(PACKAGE_DIR)/test/src/rabbitmqadmin-test-wrapper.sh - -CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f) $(PACKAGE_DIR)/bin/rabbitmqadmin -define construct_app_commands - cp -r $(PACKAGE_DIR)/priv $(APP_DIR) - sed 's/%%VSN%%/$(VERSION)/' $(PACKAGE_DIR)/bin/rabbitmqadmin > $(APP_DIR)/priv/www/cli/rabbitmqadmin -endef - -# The tests require erlang/OTP R14 (httpc issue) -$(PACKAGE_DIR)+pre-test:: - if [ "`erl -noshell -eval 'io:format([list_to_integer(X) || X <- string:tokens(erlang:system_info(version), ".")] >= [5,8]), halt().'`" != true ] ; then \ - echo "Need Erlang/OTP R14A or higher" ; \ - exit 1 ; \ - fi - rm 
-rf /tmp/rabbitmq-multi-node/plugins - mkdir -p /tmp/rabbitmq-multi-node/plugins/plugins - cp -p $(UMBRELLA_BASE_DIR)/rabbitmq-management/dist/*.ez /tmp/rabbitmq-multi-node/plugins/plugins - diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.min.js b/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.min.js deleted file mode 100644 index 9733f01..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/sammy-0.6.0.min.js +++ /dev/null @@ -1,5 +0,0 @@ -// -- Sammy -- /sammy.js -// http://code.quirkey.com/sammy -// Version: 0.6.0 -// Built: Wed Sep 01 23:12:46 -0700 2010 -(function(g){var m,f="([^/]+)",i=/:([\w\d]+)/g,j=/\?([^#]*)$/,b=function(n){return Array.prototype.slice.call(n)},c=function(n){return Object.prototype.toString.call(n)==="[object Function]"},k=function(n){return Object.prototype.toString.call(n)==="[object Array]"},h=decodeURIComponent,e=function(n){return n.replace(/&/g,"&").replace(//g,">")},l=function(n){return function(o,p){return this.route.apply(this,[n,o,p])}},a={},d=[];m=function(){var o=b(arguments),p,n;m.apps=m.apps||{};if(o.length===0||o[0]&&c(o[0])){return m.apply(m,["body"].concat(o))}else{if(typeof(n=o.shift())=="string"){p=m.apps[n]||new m.Application();p.element_selector=n;if(o.length>0){g.each(o,function(q,r){p.use(r)})}if(p.element_selector!=n){delete m.apps[n]}m.apps[p.element_selector]=p;return p}}};m.VERSION="0.6.0";m.addLogger=function(n){d.push(n)};m.log=function(){var n=b(arguments);n.unshift("["+Date()+"]");g.each(d,function(p,o){o.apply(m,n)})};if(typeof window.console!="undefined"){if(c(console.log.apply)){m.addLogger(function(){window.console.log.apply(console,arguments)})}else{m.addLogger(function(){window.console.log(arguments)})}}else{if(typeof console!="undefined"){m.addLogger(function(){console.log.apply(console,arguments)})}}g.extend(m,{makeArray:b,isFunction:c,isArray:k});m.Object=function(n){return 
g.extend(this,n||{})};g.extend(m.Object.prototype,{escapeHTML:e,h:e,toHash:function(){var n={};g.each(this,function(p,o){if(!c(o)){n[p]=o}});return n},toHTML:function(){var n="";g.each(this,function(p,o){if(!c(o)){n+=""+p+" "+o+"
"}});return n},keys:function(n){var o=[];for(var p in this){if(!c(this[p])||!n){o.push(p)}}return o},has:function(n){return this[n]&&g.trim(this[n].toString())!=""},join:function(){var o=b(arguments);var n=o.shift();return o.join(n)},log:function(){m.log.apply(m,arguments)},toString:function(n){var o=[];g.each(this,function(q,p){if(!c(p)||n){o.push('"'+q+'": '+p.toString())}});return"Sammy.Object: {"+o.join(",")+"}"}});m.HashLocationProxy=function(o,n){this.app=o;this.is_native=false;this._startPolling(n)};m.HashLocationProxy.prototype={bind:function(){var n=this,o=this.app;g(window).bind("hashchange."+this.app.eventNamespace(),function(q,p){if(n.is_native===false&&!p){m.log("native hash change exists, using");n.is_native=true;clearInterval(m.HashLocationProxy._interval)}o.trigger("location-changed")});if(!m.HashLocationProxy._bindings){m.HashLocationProxy._bindings=0}m.HashLocationProxy._bindings++},unbind:function(){g(window).unbind("hashchange."+this.app.eventNamespace());m.HashLocationProxy._bindings--;if(m.HashLocationProxy._bindings<=0){clearInterval(m.HashLocationProxy._interval)}},getLocation:function(){var n=window.location.toString().match(/^[^#]*(#.+)$/);return n?n[1]:""},setLocation:function(n){return(window.location=n)},_startPolling:function(p){var o=this;if(!m.HashLocationProxy._interval){if(!p){p=10}var n=function(){current_location=o.getLocation();if(!m.HashLocationProxy._last_location||current_location!=m.HashLocationProxy._last_location){setTimeout(function(){g(window).trigger("hashchange",[true])},13)}m.HashLocationProxy._last_location=current_location};n();m.HashLocationProxy._interval=setInterval(n,p)}}};m.Application=function(n){var o=this;this.routes={};this.listeners=new m.Object({});this.arounds=[];this.befores=[];this.namespace=(new Date()).getTime()+"-"+parseInt(Math.random()*1000,10);this.context_prototype=function(){m.EventContext.apply(this,arguments)};this.context_prototype.prototype=new 
m.EventContext();if(c(n)){n.apply(this,[this])}if(!this._location_proxy){this.setLocationProxy(new m.HashLocationProxy(this,this.run_interval_every))}if(this.debug){this.bindToAllEvents(function(q,p){o.log(o.toString(),q.cleaned_type,p||{})})}};m.Application.prototype=g.extend({},m.Object.prototype,{ROUTE_VERBS:["get","post","put","delete"],APP_EVENTS:["run","unload","lookup-route","run-route","route-found","event-context-before","event-context-after","changed","error","check-form-submission","redirect"],_last_route:null,_location_proxy:null,_running:false,element_selector:"body",debug:false,raise_errors:false,run_interval_every:50,template_engine:null,toString:function(){return"Sammy.Application:"+this.element_selector},$element:function(){return g(this.element_selector)},use:function(){var n=b(arguments),p=n.shift(),o=p||"";try{n.unshift(this);if(typeof p=="string"){o="Sammy."+p;p=m[p]}p.apply(this,n)}catch(q){if(typeof p==="undefined"){this.error("Plugin Error: called use() but plugin ("+o.toString()+") is not defined",q)}else{if(!c(p)){this.error("Plugin Error: called use() but '"+o.toString()+"' is not a function",q)}else{this.error("Plugin Error",q)}}}return this},setLocationProxy:function(n){var o=this._location_proxy;this._location_proxy=n;if(this.isRunning()){if(o){o.unbind()}this._location_proxy.bind()}},route:function(q,o,s){var p=this,r=[],n;if(!s&&c(o)){o=q;s=o;q="any"}q=q.toLowerCase();if(o.constructor==String){i.lastIndex=0;while((path_match=i.exec(o))!==null){r.push(path_match[1])}o=new RegExp("^"+o.replace(i,f)+"$")}if(typeof s=="string"){s=p[s]}n=function(t){var u={verb:t,path:o,callback:s,param_names:r};p.routes[t]=p.routes[t]||[];p.routes[t].push(u)};if(q==="any"){g.each(this.ROUTE_VERBS,function(u,t){n(t)})}else{n(q)}return this},get:l("get"),post:l("post"),put:l("put"),del:l("delete"),any:l("any"),mapRoutes:function(o){var n=this;g.each(o,function(p,q){n.route.apply(n,q)});return 
this},eventNamespace:function(){return["sammy-app",this.namespace].join("-")},bind:function(n,p,r){var q=this;if(typeof r=="undefined"){r=p}var o=function(){var u,s,t;u=arguments[0];t=arguments[1];if(t&&t.context){s=t.context;delete t.context}else{s=new q.context_prototype(q,"bind",u.type,t,u.target)}u.cleaned_type=u.type.replace(q.eventNamespace(),"");r.apply(s,[u,t])};if(!this.listeners[n]){this.listeners[n]=[]}this.listeners[n].push(o);if(this.isRunning()){this._listen(n,o)}return this},trigger:function(n,o){this.$element().trigger([n,this.eventNamespace()].join("."),[o]);return this},refresh:function(){this.last_location=null;this.trigger("location-changed");return this},before:function(n,o){if(c(n)){o=n;n={}}this.befores.push([n,o]);return this},after:function(n){return this.bind("event-context-after",n)},around:function(n){this.arounds.push(n);return this},isRunning:function(){return this._running},helpers:function(n){g.extend(this.context_prototype.prototype,n);return this},helper:function(n,o){this.context_prototype.prototype[n]=o;return this},run:function(n){if(this.isRunning()){return false}var o=this;g.each(this.listeners.toHash(),function(p,q){g.each(q,function(s,r){o._listen(p,r)})});this.trigger("run",{start_url:n});this._running=true;this.last_location=null;if(this.getLocation()==""&&typeof n!="undefined"){this.setLocation(n)}this._checkLocation();this._location_proxy.bind();this.bind("location-changed",function(){o._checkLocation()});this.bind("submit",function(q){var p=o._checkFormSubmission(g(q.target).closest("form"));return(p===false)?q.preventDefault():false});g(window).bind("beforeunload",function(){o.unload()});return this.trigger("changed")},unload:function(){if(!this.isRunning()){return false}var n=this;this.trigger("unload");this._location_proxy.unbind();this.$element().unbind("submit").removeClass(n.eventNamespace());g.each(this.listeners.toHash(),function(o,p){g.each(p,function(r,q){n._unlisten(o,q)})});this._running=false;return 
this},bindToAllEvents:function(o){var n=this;g.each(this.APP_EVENTS,function(p,q){n.bind(q,o)});g.each(this.listeners.keys(true),function(q,p){if(n.APP_EVENTS.indexOf(p)==-1){n.bind(p,o)}});return this},routablePath:function(n){return n.replace(j,"")},lookupRoute:function(q,o){var p=this,n=false;this.trigger("lookup-route",{verb:q,path:o});if(typeof this.routes[q]!="undefined"){g.each(this.routes[q],function(s,r){if(p.routablePath(o).match(r.path)){n=r;return false}})}return n},runRoute:function(p,B,r,u){var q=this,z=this.lookupRoute(p,B),o,x,s,w,A,y,v,n;this.log("runRoute",[p,B].join(" "));this.trigger("run-route",{verb:p,path:B,params:r});if(typeof r=="undefined"){r={}}g.extend(r,this._parseQueryString(B));if(z){this.trigger("route-found",{route:z});if((path_params=z.path.exec(this.routablePath(B)))!==null){path_params.shift();g.each(path_params,function(C,D){if(z.param_names[C]){r[z.param_names[C]]=h(D)}else{if(!r.splat){r.splat=[]}r.splat.push(h(D))}})}o=new this.context_prototype(this,p,B,r,u);s=this.arounds.slice(0);A=this.befores.slice(0);v=[o].concat(r.splat);x=function(){var C;while(A.length>0){y=A.shift();if(q.contextMatchesOptions(o,y[0])){C=y[1].apply(o,[o]);if(C===false){return false}}}q.last_route=z;o.trigger("event-context-before",{context:o});C=z.callback.apply(o,v);o.trigger("event-context-after",{context:o});return C};g.each(s.reverse(),function(C,D){var E=x;x=function(){return D.apply(o,[E])}});try{n=x()}catch(t){this.error(["500 Error",p,B].join(" "),t)}return n}else{return this.notFound(p,B)}},contextMatchesOptions:function(q,s,o){var p=s;if(typeof p==="undefined"||p=={}){return true}if(typeof o==="undefined"){o=true}if(typeof p==="string"||c(p.test)){p={path:p}}if(p.only){return this.contextMatchesOptions(q,p.only,true)}else{if(p.except){return this.contextMatchesOptions(q,p.except,false)}}var n=true,r=true;if(p.path){if(c(p.path.test)){n=p.path.test(q.path)}else{n=(p.path.toString()===q.path)}}if(p.verb){r=p.verb===q.verb}return 
o?(r&&n):!(r&&n)},getLocation:function(){return this._location_proxy.getLocation()},setLocation:function(n){return this._location_proxy.setLocation(n)},swap:function(n){return this.$element().html(n)},templateCache:function(n,o){if(typeof o!="undefined"){return a[n]=o}else{return a[n]}},notFound:function(p,o){var n=this.error(["404 Not Found",p,o].join(" "));return(p==="get")?n:true},error:function(o,n){if(!n){n=new Error()}n.message=[o,n.message].join(" ");this.trigger("error",{message:n.message,error:n});if(this.raise_errors){throw (n)}else{this.log(n.message,n)}},_checkLocation:function(){var n,o;n=this.getLocation();if(n!=this.last_location){this.last_location=n;o=this.runRoute("get",n)}return o},_getFormVerb:function(o){var n=g(o),p;$_method=n.find('input[name="_method"]');if($_method.length>0){p=$_method.val()}if(!p){p=n[0].getAttribute("method")}return g.trim(p.toString().toLowerCase())},_checkFormSubmission:function(p){var n,q,s,r,o;this.trigger("check-form-submission",{form:p});n=g(p);q=n.attr("action");s=this._getFormVerb(n);if(!s||s==""){s="get"}this.log("_checkFormSubmission",n,q,s);if(s==="get"){this.setLocation(q+"?"+n.serialize());o=false}else{r=g.extend({},this._parseFormParams(n));o=this.runRoute(s,q,r,p.get(0))}return(typeof o=="undefined")?false:o},_parseFormParams:function(n){var q={},p=n.serializeArray(),o;for(o=0;o0){this.then(this.callbacks.shift())}},load:function(n,o,q){var p=this;return this.then(function(){var r,s;if(c(o)){q=o;o={}}else{o=g.extend({},o)}if(q){this.then(q)}if(typeof n==="string"){r=!(o.cache===false);delete o.cache;if(o.engine){p.next_engine=o.engine;delete o.engine}if(r&&(s=this.event_context.app.templateCache(n))){return s}this.wait();g.ajax(g.extend({url:n,data:{},type:"get",success:function(t){if(r){p.event_context.app.templateCache(n,t)}p.next(t)}},o));return false}else{if(n.nodeType){return n.innerHTML}if(n.selector){p.next_engine=n.attr("data-engine");if(o.clone===false){return 
n.remove()[0].innerHTML.toString()}else{return n[0].innerHTML.toString()}}}})},render:function(n,o,p){if(c(n)&&!o){return this.then(n)}else{return this.load(n).interpolate(o,n).then(p)}},collect:function(p,o){var n=this;return this.then(function(){var q="";g.each(p,function(r,t){var s=o.apply(n,[r,t]);q+=s;return s});return q})},renderEach:function(n,o,p,q){if(k(o)){q=p;p=o;o=null}if(!p&&k(this.content)){p=this.content}return this.load(n).collect(p,function(r,s){var t={};o?(t[o]=s):(t=s);return this.event_context.interpolate(this.content,t,n)})},interpolate:function(q,p,n){var o=this;return this.then(function(s,r){if(this.next_engine){p=this.next_engine;this.next_engine=false}var t=o.event_context.interpolate(s,q,p);return n?r+t:t})},swap:function(){return this.then(function(n){this.event_context.swap(n)}).trigger("changed",{})},appendTo:function(n){return this.then(function(o){g(n).append(o)}).trigger("changed",{})},prependTo:function(n){return this.then(function(o){g(n).prepend(o)}).trigger("changed",{})},replace:function(n){return this.then(function(o){g(n).html(o)}).trigger("changed",{})},trigger:function(n,o){return this.then(function(p){if(typeof o=="undefined"){o={content:p}}this.event_context.trigger(n,o)})}});m.EventContext=function(r,q,o,p,n){this.app=r;this.verb=q;this.path=o;this.params=new m.Object(p);this.target=n};m.EventContext.prototype=g.extend({},m.Object.prototype,{$element:function(){return this.app.$element()},engineFor:function(p){var o=this,n;if(c(p)){return p}p=p.toString();if((n=p.match(/\.([^\.]+)$/))){p=n[1]}if(p&&c(o[p])){return o[p]}if(o.app.template_engine){return this.engineFor(o.app.template_engine)}return function(q,r){return q}},interpolate:function(o,p,n){return this.engineFor(n).apply(this,[o,p])},render:function(n,o,p){return new m.RenderContext(this).render(n,o,p)},load:function(n,o,p){return new m.RenderContext(this).load(n,o,p)},partial:function(n,o){return this.render(n,o).swap()},redirect:function(){var 
p,o=b(arguments),n=this.app.getLocation();if(o.length>1){o.unshift("/");p=this.join.apply(this,o)}else{p=o[0]}this.trigger("redirect",{to:p});this.app.last_location=this.path;this.app.setLocation(p);if(n==p){this.app.trigger("location-changed")}},trigger:function(n,o){if(typeof o=="undefined"){o={}}if(!o.context){o.context=this}return this.app.trigger(n,o)},eventNamespace:function(){return this.app.eventNamespace()},swap:function(n){return this.app.swap(n)},notFound:function(){return this.app.notFound(this.verb,this.path)},toString:function(){return"Sammy.EventContext: "+[this.verb,this.path,this.params].join(" ")}});g.sammy=window.Sammy=m})(jQuery); diff --git a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channels.ejs b/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channels.ejs deleted file mode 100644 index 3829e39..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-management/priv/www/js/tmpl/channels.ejs +++ /dev/null @@ -1,5 +0,0 @@ -

Channels

-<%= filter_ui(channels) %> -
- <%= format('channels-list', {'channels': channels, 'mode': 'standalone'}) %> -
diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-mqtt/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/Makefile b/rabbitmq-server/plugins-src/rabbitmq-mqtt/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-mqtt/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/README.md b/rabbitmq-server/plugins-src/rabbitmq-mqtt/README.md deleted file mode 100644 index 72ba9ea..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-mqtt/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# RabbitMQ MQTT adapter - -The MQTT adapter is included in the RabbitMQ distribution. To enable -it, use rabbitmq-plugins
: - - rabbitmq-plugins enable rabbitmq_mqtt - -Full usage instructions can be found at -. diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/package.mk b/rabbitmq-server/plugins-src/rabbitmq-mqtt/package.mk deleted file mode 100644 index 9db0895..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-mqtt/package.mk +++ /dev/null @@ -1,21 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-server rabbitmq-erlang-client rabbitmq-test -STANDALONE_TEST_COMMANDS:=eunit:test(rabbit_mqtt_util) -WITH_BROKER_TEST_SCRIPTS:=$(PACKAGE_DIR)/test/test.sh -WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/test/ebin/test -WITH_BROKER_SETUP_SCRIPTS:=$(PACKAGE_DIR)/test/setup-rabbit-test.sh - -define package_rules - -$(PACKAGE_DIR)+pre-test:: - rm -rf $(PACKAGE_DIR)/test/certs - mkdir $(PACKAGE_DIR)/test/certs - mkdir -p $(PACKAGE_DIR)/test/ebin - sed -E -e "s|%%CERTS_DIR%%|$(abspath $(PACKAGE_DIR))/test/certs|g" < $(PACKAGE_DIR)/test/src/test.config > $(PACKAGE_DIR)/test/ebin/test.config - $(MAKE) -C $(PACKAGE_DIR)/../rabbitmq-test/certs all PASSWORD=bunnychow DIR=$(abspath $(PACKAGE_DIR))/test/certs - cp $(PACKAGE_DIR)/test/src/rabbitmq_mqtt_standalone.app.src $(PACKAGE_DIR)/test/ebin/rabbitmq_mqtt.app - -$(PACKAGE_DIR)+clean:: - rm -rf $(PACKAGE_DIR)/test/certs - -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/lib/junit.jar b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/lib/junit.jar deleted file mode 100644 index 674d71e89ea154dbe2e3cd032821c22b39e8fd68..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 121070 zcmbrk1CV6hwk_Oc+qP}nwr$(4E_d0sZC7>Kwr$&d{hf2}dG~%1??n9fPwdQyHFD?7 zHTIlyry!qDw;?Jv$h_V2Ugsdo?{2v(d5AZ+tzr&FKJ4{AE zRzg%nNtsqg^j>CSTw02Tb{0m8hH`3Rx`vK%D>l%>QxnzcUK;Unggx zWv6BMN5uc6{_k^)_g_G|f6lRhp`DAfu!VuOof$C$t&z2Xlar6Ujue&vG7p*nXI~-i5u%&4#^6t_YK&C*e^HIJaGnC=x3uT zj^~L_FF&tP>Ze+fQn@*K3pwpmWsgmT#4{q2fRoCmYk%;fU#XCE@VjZxHq3EduCXM{ zHYpt0nPc)^I(il47Zt+e@gSB!&Lr;Q3 zQ?aqSIKKhUlZb)*0VkVCd5 
ztmc=Psm2>G`k+`OZz;v8>jL=O87@2*1jL$kR$~ftP#yqu9)R8yX44c13ANCto}IQD z@N(>~!FS=`p-&a`rMf4`eDTMls*zrfxq1)Rhp=;f-h6-iZS& z`&i%&)$c;*#?`_W(gQgS!W!|c{m8vn3x1{-HgncNIgy3qgxJrYLWRiZtuWL@CXx5i z%TOCq1b|dB4rtAxF8AuFwX$_uDcIE?KC~|GRx969W2gdRFy1F0RuI#(&W5tk3@1GK z6Ns&SwSNe{YU+M;@D8GjGobsNM&Zb%uBZmI#G2|C61?L@5v;iawvf>l&1JmgU5ZCB zq^dR3Y!naj$hnu!yc+y)u6e^UhAFlCxy49`EH=tjtZtHDq&6T4HXX)ymK`*9Dj7u% zZFP`ltit-sWpcCUDAG|>)k#V&tXuz>tw6Rzj)jP2ozH}vF0e9tYmv!odzMg6%p$X1@Iy&<6 zeeU@7`~G^`u5p`<#PuetbQyZ={Ha zpxyv~pguYVgZY}sAcPL737oz^#~Q{8Q)P}8M!N#b(QKvOtf5$|h(CedmC@4z>IHHHG~OMQS0hZIZ83iYIC#zf;@!TWD9Re&&x>Yvt;l)gltCO*Kllj6d>i~ zltwl8+*w*k-)R@o#N8!vyaABT+&ys_OF<$Y%N#`B{n#%H~m|IY+8?;D^u+ zF|(S0OThCqOOHT+oHAUP%@u+f+XKhYhU&u=ii(SLVo7}^Af_XhY__(QKBzjR--u(h zKa)W*nM@UWaSeHWvsYCv=4;jZbR4rGVH6N$_YhpopS&?vm&F-;f5e^F&z%tOktzG=4$|!KrfnB zJRFyny(EJ6qlgGAnUr*H#qJy`rbX`0-1#&Jrt|G07%|puWh0@;l}^8WGat%}me(_T zmP7>u-SPQ`TqP-O@h9eLDQcz|^9&tG-C293=7G;lG-|G6X)5>0Yozb=0~Y$nO%4_-3pR*;C}8N9U~?u$ePMia6|8Xt|a zad*05Lc)(>VakYlAuluzPnnKfBR;Uyfw-MyRP?B|hG!}Y7PJbY$1q2&sZfr57)HD- z;6?KhZ)A%C*`h?27|r;!W>8eukxXn%qI$c~aM3pFWCS6kZH3qV^5lZx3P9$~y>bP0 zY3mw-O!I0(k^JZ-XP8+^T)D=G{mWLJ!4P3)RS`2Bt@&AZm|qE-F3u%LEW{NnI>TGe z&VDdHC7x#|a$<1IEiOwp&L~2PO2hV?o|PeHe!XRsQ=-!jWqW{=??9gVTX`m11d|JB z?}0Abdq;y`vAB?veKSIn4I`&{J=AwwoOz*ZjT+LVo)7XUbkiAB?TCp-lKko8K3nT@ zC*gM0V9rIox{E5)y44(Zljr4THJ(rHnqnNA#jN)&Ayy5oaVJZQAwLXwELA=|G5B(k_+|@+;+)VNHiXc8#Q^;bxW0~v-@&+U=;HG+ zxJ0Nt2Qgub*_e$;@O*^PwJDja^r~%? 
z3D<(@IiZl#o%1Ma~64me)K%3wrv8g-5gTeyIIsrX-suAd=m~^O5N??^>Iy6lo zAA(%UL^yzGR-v!t5L2|i+Epm+sPwsFvObN`l~x?sPwIfJAdeI!LtZMw zi6PX_V-B8&Dg${&ZFoMJ&g25_RC0KdhL~P$o^;Nz&2ni?*o>GLVrmUxfTPJ=IUM52 zYok(IWXNJ|EbX(gIFAOSHuxJ-vp|YAerluB2&*|KOa{DI9NeZgCiOuYL)gjLWPqS< zGa9z;QW;)zLB?;!Ax1Mf#vpA z5OPC;3sLRBa83zpxYxW0Lm1`<)a7^P1+-;6d3fEdjBuaI6tfqjBNWm`UQT3V`yXIJ zD=}aw*1CqTFGYB4?vsmXqnk!exiS4#Rb{>B8QL?S{p}kr+I7uSFIVqAH(K4a)q8!-D8N4lo4P z$b_=w=1@Hk?H=o_8(RRIOhYi!crd!>^vb*tQuQjgt>7CDVhQV~S5wbCzvvRhNwZaSfRUWOQp`y9&}$bc%53L+D(D>t3-M}`%FR_qbK;D;C_ zJZeB_Mpr_%-CUVPp5=BeMh`GRG#|sm%_45%qc4?Y zsdlrnvsva8gDnb$X5tj!HKf+ew8;|W#tAYBIvcG{x)eB#yq0;P4{@w;-ms-9UMmBl zQCKh>J_t{x6q`#GxJoVES>W<#>%00Y76-eM+)i^1ICQwh?%OG>8pRuZHsNFJ1=p{at?J8Ng=j<@)(zPkF zedM!0k{C>;CX>9U$|{Q^k?ZYa{k)@T;zrF37f|w4`~6U+`zu5?xfpfFE^C9>Tazq` z4gEZ@PWBDTo!7rdHqI*YJIeb#^w}0>*!csMb#Q-o5~Oi!pqa}&V^7+voXZzz-4b$T zR;OpGq7*0jnQs<1g+Z79S>_=z=Xp!`TZMTjBi%&t>)oU*wEGJX_H!Z@$NHn=mOhUJ z{Q>`{zq}jW?w=#Q!f;e zmnS6s7k{&V#Ik6jRb(E*yz0d4G=T5;h?Y1@ZD4~=m;srZD?^64KecY8h9IdND>h+y zk?7GaHqXGxVz8Jqr1yc8xl6K)<5?uqSP|iLmB^RX7<2q#3aKBgB^7<2Kq(LKJW!iD zm-s3oM-~crmODlb@By`QU)DZtUAVK}uog(?@+3Kkw=+-=d~dvq_g%ifv@-d@k1T~hMS0~v7UjQdWqC(CGe;9ACqV2|?_yD}EzFHI+4S}e_I zISx^V%auuG$RLYm9OkvzOz4-XEfr8!5 z^$yGSSpz$Z;U(bcD9x1)JBtcnJh3s?TpA6pcoam^PUXGc~lC zakP$l^N&!i`?gr_`Mjmc8*Avpt=bpIl-tOGol8Q^FyS3!WIw$~!at?g>+T@4CPQ61 z{n3ovaV-GN@GA=YZ`4Fv>rK~4)Ckh5x4Di}tw6Zc(%>< z${2!K?WV;l_EG#k@y8qUW8K5~g0t-%c5MfnRDmA$joH*_+ho&3WT&_<;qTeQjTky1 z+fUzOuE}M|FN}i$mk>`)aSQdy94Mog?b9g%q(-M7ZtOf8Dc8FrAT#C@h& zEeSVb_OMUaYwv9Z#8elYaM4Qp0!(UZTTt=qs9@pnwgw3O1c#f)Z6@Bb`1)ak$gS_0|*K_(ud!GXK1y3ImUANCJhJhe z01}pi0z=%wZ8*iQ(ahkre8~8EUOV5Izu>)vi0N*b-kK6~KrE<66mQik`J{QQ=MEH5 zpU}s-W^+gL=nDv}-7zRm=>#%a_7J@#h*;=XDr|PM4QBl8VDRW`Oeb>=ytspPyI61i zM!GTHMlHd0AAkSWW@iVWvYqr0E{wm=I*<9A{ ze@ZybAEWpGt*deWrmK}roSYT^VY&V{E0PZAj}@sl-fTtj_^D9{r>wq3EF2aqjtPAF z?hKPvycxBrv1cX$e!BzqqOdn*P8i2Hdf+?3VK(*qFS}|)A4V!@)D)BQGQum1@>`Mg zjT=S?BZUnI-k_A?Drg&VH_A`bZj7I61pdf6g`%r7A@AuKs4Bm<9FZpNEH`DQ1dEcw 
zuGY8d9c(@5V^F9ROR>WLmMdJLBPi{#8=Q(o@Cm;?K4{`jaxyRi08Inh(>`{*08A)I zP$l?|*k;+po8pXS2(>~1Uy}uaO35u84@uvOr{(m`A?LViHS!_)ic(*mo9r4R5EY9NEKG1P}4 z)`X@tT*MlH9s};tyrFr|&<1fgzkp_}60zq*yGh37n65zO$+VR6^Iv395%326IZ8;tc+Pg@2-W!gZ z);Mpm1pUV6mKsGdsVYyHap9+xMm)~E5e$_ow~ZGjOE9N*`We|c2Y2ajPt7FX6#Met z8cpkL#UB0+5jD7u({X*d6u1TIdQODRvqJj|3_~^5bn6aiR0HviQN_UxdI+TmZr1?j z(>-XEQul7dMDFb?(nvD|Ts!2XGq4CJF!P4J3IMviF&b&DB3QyO)qccYh)jMj@umom z#ZgoYwIB+;7k&2z;6;L4vh*Hkmw~S-4I}A%pS^6CA(PO&93i=Zn~^cT4=E^-9{TCr ztY_B?di81r4KU_65#SI*o~Pqew3Wh&aky%Nd;E-LP&Qc`sFjLtt@V0%MCzggE(c6P zD&d1NS^FG6${~2RDu|rZneQl!O#^(Z6~c@O&a|QEqkeL%@)?UoaqKqLSx|I@L;Q3_ z+=iMkEj8+*cvsKNy!Y%!JoEHJu{{1pN||nb2pY@8PdhcW%w{x%#7Tz;+nl=;2%9-n zt1f1h$Ky|QA%JPv4VbEZM%~%kFtyhyDB6Ao2%$yCRw)#;UhtZIn54A~ZJ!!Cm}Q1f z;uW^T zaeTVu!YU(~xWhdfTUiED(aWSyti(DWU`Q_F1!^o#)`jZyf%Q)20t!`VL~)k1U%!fo zk??XF1F$?pGMg7dEVr!s!XSlb%o6EMpmOLCL_M|_0WHtVM8H7<-lQr_TB#Oq3i9Gi zvgFJ(WN$h=Zbe|$7U}>~R-Pnb<3o?B9lYIv)-XM@3nDl<@)rxlu&e_$TxMhd(-7tLqk~^jI7WLbQ zdIMWN*$yk}Ek#(SqL$Iex!%z^8r{2m!OcOme=+TzF5!&+5J3K>8BdBOoY5t>-pjnc zY3$Z49<+V%%l!uEgWSPvEeKrqX@kpan$7I$efS(paYZmKZ$=KVOxw2r@w8dEk~SV6 zes-uDWQczISm^4FyVY@;(`Qx%RT0|6NfK2JS|oR)pgP`QXH*@cUUP(A`bw$~m$8s? 
zkyWT2_bCVnN{Mx@7Nb5JdccD5B&>9ht4n{x!yx6|23KM;25$aLa|l}Gx+alD(81}m zEsVC`9N!cA9OZgBXt=%llzV$?v9n+W2#TX!Wk`UJ>j#1)tbchwZR=`XQgKk>lkfIsRaP+?f2n%*>!C<^C#0G?>$vl0z&30Bv%A zHT@D82QlqGU4rdDc8UMc^qKzEBouY5kp49NmS3Bu9g|CqOPU3XtjWwGYBwVCz!0cV zy`P2c;m}fP5>o>FUE;u*(IT# z+R}!^*RF6}-=>iTNj^)j6IrO_(7!9+cA5nh9c}kq;^u8kfl4b{+CCFb81M>XCa%QQ5aceRnpC6k0eEujMnoghSV;-rf}<88C)zj&QFm0h^u!u z&YHB*WnQ;LOrIx6`lATa%O4RlG4_?OKXcmNAaK*8ywRU#V^D)KF})%GSh_<2q!>_ux{(uZzw6Dy|z+SB5YVuaP>!}M_^ zzsqiX%`i-h)ZAr|*K`kZOBSQy%%SHoOdV8lA!4o596~bP5ox8AoMMf`YUv~Tfnrs3 z-RjhNz1+jYXVnL_@bwRkpjV)2UjK*vn*S~P z%>QElU#ri{1Gb1QQKC`XO-bD51Gcz#_~iLv0r1M0qwmI9OArpIu4Y7ErTGDPJAr&X zcqCgGKmh~)Tzq!A^6~ZX76i3YCzN(mtdhp3l`aoJN=w}?TVkwU;DI-2B=RIYf&64+ zw{$Z$=Qak1svb||e=20{l{Hr`#G+(~FT)Li(3?n;q-0sLgZ|PFjJ4CXbRvy{#ON+V zZxTt>`iiWbBQ$m^P?8v-u4(i}zG?EO?s~n69XA3m2r?A1@JP*WG4Uo`(-}&}R3LAs zL7>ul&d{)mp?@FMOX6Iof@E8&i~JV8ZQ~}Pu5S@YxL)0OrGi_!r< zdWAj$A22=oE|DZ>E?|Z|?5ZD%xX$mUj@ZFlF>IlV7bY#e#jM})xuaSAZ9mbUP0$4r zX@BX|Jk@lrj3-}Ny=8DQWFQG^qW8s-FQMn}${G_uvZ85uc@Y zU7%Hof^YvLi*>XAb@cBOcD|eWx~&KQIjX->x=wa}@7Etv3;(yIvi!@7QPPt8lhR+s zRkS*)o(S*)15%4NP}I8R<_L-55E2EyNPKQ=v_f1nOqQI@6uu*Ccrz9Z|LiJwDTp*H zPFnhrA#FO9cJ000Hu3d!e~-4O-b)qICQHnH+KOj#z~RN6cuh#)&ZfrLgFc7zJmTW5 z)|lB61#2e>ciT+8@IV%=t3a0X_iDo_^hDeN00wj# zLcc4q3H})BQC;kSindWIcd1DBK@HDU!o~x5g>z%d&T?Jh4qM{Ziw_C=2R!)LU=Iyw z!C=uHVpLX5bg_DZj$C6HX($(|?f#V?6Yd6PtpzbJ8qMm`=2nwD|Ew_X_`)pGBEz5x zDAR$XSShH|6~z1R^jXZS4Er1Y6jB|GU`u{fTo_Ao27N%eAr3i8mYc+K@NX3cqa0d6 zio`mDJ&bH7`f4)IEr0S}O?a1#1)WD=#e&z$a&%BDH4Kz(7$IE6BL+|H1yD_m$3Am8 zK_3Q9Qqs}plSbL*C*$dl{p8%pzOQ^kc>>>a`>{Os2;94||i5Iox_`G|!QT zTJ{y&Cx@DtBszLTy|}RFTl*cqOr2{Zr$Q!?Z^oHn#At;9xGEN|9W2YTc~sq4O=voo z4OU7{v|eSoPE(m)-(T-M zLZEhaQY+XcnrZyrY-NyB#)e(Rn&YjrxBNkK>e3?g2ya?99_-0^x3L7)9kfb-97$`y z*Fpt^n@Ydpq#tjBYAtD%=gb>??Owq`gS@{bGoUP#M_z~qON|6=t_cL4_(xm|;)so( z(t+?mxT)|Xj(NEc?$iZ-$zM>=$jy)EMNSXc_pLb6K@J+@8=9PpFj-w9l*4^2p49%% zw>R>MyY)cU8$-yO+DexB>OGp$Gil2Xl@X+sb>UMHeJj;Gu8}8#&sg=8PLe+nv%nsU 
z8N;O8is)*f4)`8L-TWnvn3g+fH4^YV+G73O#{lQ0`ht3sXqD3dRIO9;2sOC$1+(PK zF1?y}F+Oox>2&1OxQXUI8W%b+BzPE9FnFZ7tp5J1>YL@OHc+cXA#EQOlXZQ_YPgzf z3U-TqNgIxQ`yW~l*x{um;{Q*(^`ea5ghw|sP}*Yl_|b+fN7R&uvpHOk*^mqcgefp ztz^aI?~LK(eXKQv5L*=6ovSdz%o7+p8NKv>HZOJjF%M25R_BI*hOVed?f#etlV_1m zM~=*!5YmnZ5)7M490%sLWpg}3xOF#L`&l^N>QQBO+H(EI5$g39CgU2tX9!>Z&9V12WID>hRL6<>qrRuer$ZZ~$RAIk<>O(e)`hOOdHQ zX~By|0MyGLGl(pI8-Hsc0DqCcRceZZs70fGdCYc6rxTxAZ=wNmG5?wp@ zKqqn@Vw7!)fw1zzP5?F}Q~H)bydzV<40V*#SPsS;R2e6Hd2e2^2q%t%SaJu`xM6%n zy;NjB(w76<=JsrT;;68{4Z*@w|7g|kWD`6T*o1S$IrJsl+8!(qLi6|I! zO}S>c2`k&iyuR4!w}|xK*TiJa7>YES=A6Pl!a3K1Bmd_{CPfikiGV+B{^zY||3Tz) z{EJP6@qe-@i-RLp@mNJ;tKAq7y(8)EA0}uZuW-tnB`RZKrcb)R-=IxF`|*QM^0x$3 zcxgtaQ-YVQtnJp8o~}X3Pp#LZ&yq%yFK=q?2e1Bwr-Ls%^cug^5;n)pVt^`Iop7pp#^emCiC zyX$uMofRH5lP;Mps!4gPg$!O(`f~$JC*VEeq;G17Es^+=IBr!^ms6_U5Cw#f5DM51 z!g1pe8R%PNckv%gsde@&W7ZLI=n+et-=0#Gf4Uyq0kCXm)WPf$XhUmI6%|9$L{l}cc{(t(f z|Kupi6T(}0@$s9(&6F`=ln6pXy$_SdFcr8jKMW$Az+bch-++PqEVeUfc2qhu4W3_1 zYk|_G%nGGZxhh$87BU^f8d-#jwrXv%Xi4$X(8{K1sY)|t`+4KpREqTJ`xt2B`HJUx zqk-;uk}2ogn2*D}_$|QVg2N||zjA0B?;L8OlDVJ`#V0Qo!vTp&rg4eTv)Htb&(ai- zJ@u|VS>%I7f{ZRonK|06iV3={)1o49X<#R6!0KmA{Lp906)~!lv=qeSEu<2oz$t%EKkoh z>_jy?l0^x3#hIl}WWF9v;rR=c_@5MhU?rskQ7P=cE8p1E5eQe&=j|L&1tH!KzvavP zjHNhH6`3rq?Ph{MlRYC9N`=9ih;~{IW&72c!nVlPoY_>_WMYD9Hr_|DJK z>@adsUS6mbDKUDCb5@3hern2}5B=(C2Ai9%ku9oXf<2c86~M?PDvC)Ji#aCxZqAx> zEw^T-(Oc0uk{*yjeNO9C506=dWf#`ZzKIVp){Tck%gl3B!jQHO1|ZDcTgkQt5$An5 z^gZ{}Ar1bKu|+uHych}EQ6XEt7JaT7`%DA1N~%*?PD;SQlBMla7@Wd#D7`=H3m?2( zn+gl_)R-5=O^!tYwAhUmdJ=7aXd>0?8g3~jVrsiE(K7L6EkwF8bA z@M(zi%pIUUjv8jIKd>jsqs`pN4yvWC=o`Eh(98~rlydH07^|wLw-^}hw9_Y^!NYR(>L0W7rh<* zRi?7qy|P^u=sJK0KA3OYzAn{4veMRG*E*W(W#P$d>T5mSv-PCTlu(e&6Z=MFI`OiQ?R(DZsf$(YkrHB*a zWPuPKw0lq!*D$?SYBX?jZ;kH*J9=Nmo33HA$q5gXVwWAv!j2^T=KfW(!!6#>B@_;= z5L`wz*3a}Yx&h9yY%By0S-sQG4~{pbtg*@*dD4!-ag=M44a)lpP7ZY3oc8-GEb$Z+ z52DU657AJkUE*LDE3LQ9w)*K#T4(FgTmuazq45mDSY?O;QyxU4JU-2Q%|D)h4g=gv zM4Q@^m(wB`TC-TuDUYNPyoKAz@Wq2B9CLEh+vYC<@nV 
zpHlf|UetAP;;V+}hmZF6bCEi7w%X%*8ZK!%n>@@iW zf1~NC(Q6a79VyvD4YJtGQNGC;2HsmyjGCFMo_qQo5n&Ad(!ihQEWV9Ml56F@KE+D zga&~;XIShP;Zz;xkM($HSVK&}>z@)33MlPAE5HTHns8yIT$*T^qKY*w*#cqj>?g(* zG|l80qLS@bb1s15drpsp5%s%J7|s+K(_-%OFs3~2cLqS(-;+mOD9NT-^}UA%RYG+27@9L(m+=-G;m02a^~1!3c|0E0`kNZz{a3BNPy=5`obfgNLm34UxIo zZd7&?%1IT^MnQiO3jAAk>0K=OoWa(2VH_-eErt2UWNlh|dCf1=kCKerU`AQ0NiN!Z z36PEN^3d#{vd`Lep2_7)Tsd=G% zfh^C22Nb)(?!5MqUhLESirM|KOZCy$(OW^%Fnw1)I?8E z&#H#>_c{SFw4?)27>pw}jK`FZW>2+hj{T@+Qcfy=qRdIRn1R0Q4N;c4tM^HES>>gA zuiq>3c45(;vNk;X_Cv(#T!}6ErnF@?0#(ZI-IM5zsvJiDl5Z62BOld`eqDYGl)6`V$ zA~x4q$a2n^u}io)IEcw0JVJAjQF3Iw9@|DaNk#vpI)B+|G>J`~aVrUa?4!ueAEi(q zZ-vB^+-X0J8v4mGfKP`Kc9==317WShxOduSt9}S7No$qwh+tLfa^}q7pqwo0SBM3x zm~NB;tp0kTpvp1ff8aR+b2e}Sz4$8`hjVaV6N=7LNo`;u<6fr1syYUyqfYAd0o#T4 z%@if^j#CV7Pc=YnU#sl8cZ|wOFGzy@ROJ`~BsP@_A2$Q?dbJkjhPov_8Uui4MFRnH zn37QSbWh<^=Gl}gbj;nTxfd=U7ER$fo!GQTS%Ky*TDfb7wi zw&bJz4RlIgYp}T+xJ*kY653FKs}P}~DFNu2ZLtXC4ACV`%|=;2pF2hV0mIgXgPUQK zfN>FqkD#RBSn*e0wX&MWBFfpiw31WBq%DJp$N3@)2xVopNNUB&vlvx=8JA^DJ1#=0 z;h$Y`a4IKWhB0r^#N5Dk=SE3+wsXsOmR#DS0hP5%nPQujSRiq!k9lzxg0hAKv0MFBuCx1=XTy8K7DGF}49;1%c!pa{g|SHyg#f0PWZ#yTEKb*V zu7+pYK2itZ2bE+iO|R=&SdWIMgbNU)e?kf#a4psl4Dg#15igXL=*`d+d(J|FjDI3z z)eNbcs*F7aGq`(ee5&^70h}>MtAPwVojunk;&I2I zc@_AWY8Nxo;K^*Bh&#J-QkN=vj_}!nJMJ$sRL3kEa{~Lo{*&~~A)ZIJd6?pRHbnfX1bo4+&zT`|$IKs6Gv0w4-?{w2WAJnaAK)AaS_gK@GlnEDP zEQoYJKAM$+XWWSIB2;{Ra=@l0&*WM+UC4g!$jI44shPgK^>Tp?>EDyJ{wM65y8YCW z1oWeM25?w~Z?4-w3$eK-GAdK-EGkh)mLE~`kRCcoqP(g~JXqq#{D4jbSst;23U9e} zsEqIUK#01+6)aI!C(Q37rm-aQM&5yq&A$&KfwMGFN**1~B<1zV;fDQhK`=^(S4t00 zu{qTWV2kDLfZ6V#Kyu(JgKlcrWU-JM2bZ@#(6?x7O}lVQq{{LB%-3QJUp*L?R9@|= z&`L$%2%6y`JADH@qdMm>86VsbH%yKFOMr_A9?5@jW|tC(_%iQ1nM zOW)Khz^rKbTFQZ059}QN1ybno_f6tGytx;G<~xEg8&R^205v;a-439=C+~$HmGu|l zq(-nv(xe6e%^qepO#1Mg0(a=KH|Q+dm8&MILq40>D6>3pZ59ARvO>>&wOg zdPaUgC+laBW%C}N&8;3 z^`J^TuZ6&6E85$MtnZXIoB%pIW}p?N>=v>ei_4YhYa6I^Q@rTe=nr`b+GW3P=4M#< z+4m1zpPrmKzr+k))q+zb71?hUy}aoK>^E&38NJeAbhJ9HtO?zwWEhkk!KUDdg+uL1 z+uhkSTv~O0 
zWl2GzLB@Xel$5S8$Z;e=<#{w0C?6V3NsKR;ar(EPNI#;@a3VB>CYEti=4^Dv<*4W$ z*uvQz0+s)$c>N%JCI@oLP2W;3xKZ}sX^X*Ygi8J-pCuN?8)JPX)k}J^QUUIEWzU=v zOpOp4Lje2AeG@NL)JqBiBtson#z@xjY+KnUua*W>XCDFxrGf(UXNd z!-&ty-W9)ijAU*@`xWbsiE@jeJTY&kE68PGtEpzWq8oy1g*~8)t z7;gYHop+dOx9`!2SZ0rcCu4T&&G?GG)&oTz^6Fb%B%>{s;X&YoqS+o4Q!*ZVWZUy0 zqb;`{SMSH2VNjDyOV+PisAMIBniG5&-2Ot{7(Ojn@j@-!Rz{kHra5S~`{GT(0yT^* zE~3dk#808AtIj({Ne9(1Hr{5~=#4p1jsvJIW%TNt4*i@lbY2=h9ker*Z}e&n4xNNt zQ-XRmGoV*#m9V6YGE+{vPGVY-ZkUoE!iXnj*=Hc6fnW zDlKx8mTMKJEqoFQ%ZcS}?+XD>?~4~Y%FLyf%h|zu?0ILn6e`1mDn3k-0xo+6H0I43@!`Knjb-rAd}a z_9dznEUalPWyP-mi@b%}`!pPwN8WAJEo{qc`HY|vO=JrgU9Fr};{S)ScZ#yKOSV8O z?MmCW?MmCWZQHhOJ1cG5wkvI$H~;QFr|-F=`ycn=+xv0v^{rU3W<<=G0Whi)utGc0 zV^CV3q_d~?n-FPqM+kopd$z!sgtbw!Vs+s3kvubbx; zJAZE&HEI`~1}_Y0DbmuZHv0Sw<+>F1l6+QFn)s}Vj3;Y8B z(oakMkn-MXX}qlTt}dCWfH1t`$~+BM%meQsYX+u`w58JVL*fs6bNTx9g49vjIy_(I zf(Z8Mo9Hx+{TOny6|sx1P^3fSMv|q&2z6Lt-oAh+Z|-441#aQER&5C`A6-rpst4j} zHNY#;UQm*v?J@Q4>1Bh}OCjG;JDk&1F~U}qqZ6#51MSQ6Fca9PH`i63f-9~391uE9 zJ;%woxyz}uNz!I()Qu7!lzeSIUQ7k9oI-|Au}df-YNX=+ z{p@+3-5-HOvfdCdU}ch*`akE$yod`fw}ncc;f(j}9&&lKV8%|0PW4ogBVBoMaFawf zrOSr*PDe=`HTOINPEVZkUXF4V;D#4zgN=(q{y4muaq&W~v;SD-0$t^j7P&)gh|MWX zcqTXBEh)nW*G?` zTaeP@wW_nrG80LvD;HZ@MC!O9et*-_#Kxd_?OUWCA+>? 
zJ7pd~2%Gs`!lY$m!J$JG#`wv`)YlaO^qGYp{EoEi`9NBwMBEKiGiy~Rmczq@H%H2W zHksDQwa-?!s{Q-Bdq!!7`^jlOvlWPj8anAm!C%~9~gFtq;O_g zOk(2V>XCGLew{d3N<2y{`x+jZN|HAq&B(<*cx|{InRQx))PI-Yg1~cfPpoeg25Z!c zN&Ir{0YSWd@CnxXPQ0HV{ghuvDe%Ap;p8E05AS-PROY$T6w}sMjd}R$dzWRL0~`0g z31A-kc07BJ^f_b})G;FD_`t^iyuSyw$Ui)-qGuWL_l%$Lws~-B@6P{63cssd@5xYya-Z7`0(<9wj&JS4Ps1`fuF6H7FGFX z^0M17E|6n}-=-k~c2A0&+R za#!9=xxg%D+VqJ=xeRw1CQDR$_Xhw@$$C3Qk6lhXBu>?A9Q1?(diL&3JI2K=K7UXy z?;HEF#8QQ-2M15gk2cQ!=w|HFp< z$p*+uT$TUka0Y)(_5DJV3Jnc~1dtPeD)39OSfndSBC=(5%_Ji?MA)5_@Oc=^%V_;rD+YinH*g{wSbw807vZCWI-cP0=)YY z2jVv%=_R&SGn35^Nbcvj`mkRxbS88%%^AT=9!tGUDQ(qzqxHebAz50E`Hgg*#$4oc zId!z{wm1gU_d2Pj7BcZtvD9YL(W_R8XR6ERGNBx;?2Onm_Kp&-+-V7W_+XCpSa2}& z7&muGZS5;wrCWHSIQy2KVPtgibhfG|WejIZ$r0lOHNf=IYV4ptIGkfaIvg|~78I+m8;}ZEm6kuX=3@LSb!2QR~tJdEwF3dZ8q=9}SJ3GpGJ>K!F6DzV(Q?_Fe@}k1uYjxK14<3@;2l|eR61%Zqfs=tku2$_lTN`PQzugUgqpAp!4Wm zq0N#SDB8X=@elBgISH|ftR|yu?04p=<{eHoRgoi#C^So+2YJ9Z^zJW(Q29+TYU7 zeiYdc{Inip@5E0S63b*@KcYwI)u9s?G3nB~ZykM`D5zzQWIcJ4s_Q=%j0>y})&w_T z89~?V2_(leVA8*r-X)-+XDR7si&JN&D0)J)T(Z4Dj6YcHGf{2~&cOBg(k#-+WG-ys z$aBw8%eBjvzP=}DO>yO!D%0FlG_Xk&Bi%Ex&6wuf=(Q|-^_|rN%2A-W*^QW`Y)p2| z0#smVooV%&3YMi?=G-r^(Z~<#i%#z1p_J^d@+*RPZH`Skpl!&9*Ft+;xO&e z`6+7uVUTjP>OY`!w8$=RL3#E71bQ*--cDPhEo@kvIOcjk2O{&QFqjEh;>cKvsu|r&j|+UPpwmcj<3@(uWRtQB6}zfzWAp<;=lw?hUUnHSQmxsKg=4797j z3%Yxc4qfLikQL5Z!W ztft;F-xpxx>W+4_^g@MGfN_M56cBATm#Wvub*6?U48h`q32D@%b%th?{>Df}pU`uc zx*5^^7g&k(OW892CYl-jEft&aKV3BPMz%(Jjz)%mneV@Nfr?sxbqa9LFEv$4_$AAS zS5=m-pcLjlkA~dli6sal;1NAHS(O;swXdIp-2%NMl5{-}M8Kw#F~=Xbe+}^FZLM1S z%I+8$UruB>T(Ud3_Cz6UUobgKcKhg?5| zK8L!$-)ty|CkFn9qJO2jtpvqs(G*FdZ3?rz%aD14gf$cOO=-)%v<2K9ogx3H2x6@~ zvsS-cdi4bvay1pjyxcuKQP>rlMSm?*!5;@0(NKJvJ;*+8aNXFv-<9a^N$Zui77A6z zidG$#%1T}mjZE-w8Tck5_2|OmDI6T62xi7;96I#qCNDHY9czP^;pPr~KM!w-(bY6j zy(8rD)ZfLYOrJchsProjeHT=B~c)&`n=)EjYjKnDIaSI8vQ zvE3=~b7vONa>M5B;m#`8yrMGd?Vo{%XM^+2!q?Ra11{ zulUOp?H%Ir?I(s@1B4K;*Q$NcyF~0JFTdks_*1K!)Hh1$C9{{&wKTHSA)Ems0B_RJ 
zN;S>;9jhpO%EF@2*gmcm0R1PRV`+S6C9#Do;Hn5i=T-#d0=mZWCQteVG3^h5++*O@ zqo11No|Q&C1#Y3RLFokd-~fJfG7TM@<02dn(}Bk3YZU?U+hJTQ6SfE==_lc@{irI7 z@|;tn1uf*q6#qjR&zTdg*0&|rCy>Oc#zTRa^@mV3ks2*&rwQ-9T-OEJu*ZaoGnwbudUS#Hg&>8-e0 zCYPWJq>~7hT_c?t6D=YJu7-Q~kJMa4h@cK*dyYiw2sPehOJ=FyF}QE1L|>d4Ch;-c zbqYTkMP2x&0A$Y?sN9vwIEuruI;%cq6yE}HOMME+LSWpLPr8B33o8@!fijSo=gc>T zP;!-BLdTBtFPR-m(~>@27QmKUGWr3{%c?*-GbDP%K`VD~)*wZ}D*R{_siY9DF|dPa z(WhlOsR!%kDJJ#phth2esnI`rk#Q>9%nC$xG+O8xScMORg$vZC8#Ce^9fB^Gk!iDt zX^SQ#V7;nb8oJ0HDt14J?dTC<6&L4y{_?`?_$O`f-zEnAw_cdwznPeVk)xCC{{)L( za_fH?78A)9zzyj|3EgJh0OIp_c*5>CW1$`)2Yqf4oJ6*yqq0{7tWw3h0dU3{poMoLUL8r!JCL z!BaA#783$%^0b56?A>BQ=<}H_2J26wf#1I&+Aq|AjG0gFA&NTh3y6@`v%0`wkx_{i zx!F|73VhSSP@5y01xpI_2>l}#)c*Yp6VOe@jm zy&d4k;#imiUllLK)_NiH+#GnpDgs+2Pxs`J$|zc*YAB9i_1@ZL_Q#8BLB>{@OxycI1eN{gBjuYL#A?SbiO?ZE6IRD~{$8THD(;*FY z&l^vI7inl5f6h2TLpoB++VxA#TC>E)Lh@Te+npGJGSn5?V>K=Bf)d5L{=k8>I~TkF zF(W2e<>C2!ze_Rr`n=mi9dixk(YvFH0_09iQ5P126B)f0C3%%Sb=>s_&+2;sIvEx} zR&6Ciq>uqxI)`U*-;@003v+OTub5WmUgIWVd%G#C)Q3QZ{h16wxrZ13d+rXgEP6nU z3?jxksXtP=>(^9E#w+v zSMK@3MaQa{k$e@mI@bM35MH|=K%WD|a24W_V&aiWd!u>++qE~Q5-on+ z@?`mlL21!4CP%4|H^#nF8jNfVrz|(Ec9p7iSo?`M`4qhD&VJ1M9Q+8aQSYx5z1*VH ze5|orOx_kv?TB_l@0g(yb%VWecq487Rn2YP6D67PhmvKscE!9Y)c||r4Og)Fhg_f6 z`csVs)If^i2g@hmUsj`wtQ>dzZ8ea8>u&ztVeoI^Z9WGFBl~}$y{r@sSH(r-;boGP zG%OMTAOW&J@o+>ck-2mB^r;huOm@&SdAf6Rk3{ScKnd(@sCeoX{2E2!De66;>arB# zwER`0iX9DW{6(l2LhIf-Z*x42GM!wFuph5F9yytk(vP7!cXS@cSx%p4%}ZTXnx9t{ zHnQ+l`MlujHJlldCh8&bhoOzz(idh{G9^`tLn8b$8=Hqb^~`!CAsVbtS52V0)Ch`g z^wF$qz{t!@)~JeDbzoILl4Q&H2KP~!NHJv8xmV-7DH|uxWmNqs3LbsaR;-J6-`D9; zOq>Vj{ZWJ;eqaKl7r!@ACk4Iv2OlFzQ6+?vhOpj`J_{Ig;gglRZMAU!ayX#~FqSLA z+YU{V&ghRf39iVk1K?ywn;y6*#G3`%4Ce9f_CLIS!$oA$_#-$Wi1DIK`KHfDc@Oc6 z43NR1hB2v5fk%Ce8E~AT93DwlJf$xYm%{E zUY0$jeAu}UwAAmEEku~l`>aR%StDRRVy_f0ls&eW)Blv2&ruUVy?Q|7cd8a2lnLvy z#ltK{K=q!bd8ZTXDxpX1&|LZTYT`iwX3KE}jJgEwQ;m1WBs@Y{5fSkZwrDrVSU*=3pJO*=)l3w|AW*f;NNQCD z_04u)Lg3>>_s>d;=s(~+02f#`@07o`6_2Lx0640;I%8wXx32Fn$38Spv!9)PrzoDT 
zAPIQ0_AuuJHsi1{b`iW_UB-AA;~H6DA=Jyp-HrPIUU@qEz*buLXz!%(%-`>UcyymG z8{^r$H5{K-pcD9g*FfB(g0w|w&^5RbnY#HfU!-2~-#)<+dE7RZi7q6iFhH*7*INKX z&KE+{;+A``zPr_aZy%RL;I#OQ9rk`C0(AwHnN_UX6tiPcen6Wwznq|oY6u}Me0**B zwh3+*J-LGV1U=BFfrNZS?H^AYrv|}X_2V0xC;%xFO;L|pIyva{$16ew>F;*CN1EDz z&qwJj>kZ81#ur0SaK_NqkMxj&5v9W{|KNehnS(H3A7Z#9#PXAlJl9bEAbLr+XR;A5 z1k^&GWJw`KAr@her zD5X-U$YVN*m`Vi?B`K|3;OXxMH-B0QZv883SsXw~&yi}%f@SwslFF=0@H1IL(X;D? z03=;wsLUIhH$7d$eu0U&8T=-cMSsjXP!f}j1wSoA_THIXr(j@1pE}RjJkC&JCb0Qe zP=L-J@HVgw=xtc>A(qxzQ5Slz^=JlA4ie#uRZ^l+?V>daF@x(xq-6DxB+bm4wjm>5 zPHOo%UC87=`O4%DlS zwAjnxU7TOdT%ybILV6hSrc$?X$BvAbz=n#(8xSL1rdt%ctx&D@g-)FDOZjJ36{uTrbUJdEJ4mJ7s*?#7- zg1@zgP?0qz1z+_@I-gRhr{P7 zEAWm0K8;MovyXslAKG0={28wDhzu9w5enNn5Q4S)#yk*`_Kc6i_Z}Pk%naA19s3+A z;W>cstt#gxG4jqHFKR_;B8elu8mtFJh+l)0@7n|iOAX6v)F$g?~w~QDdrqSddOj#^oX53J?3??^T{tu6K{zr|I|sgk?|rIg{$x6ZHTQ%BU`kGW+`Djn zH0(UWUWKZ;l?F$cw|sxNH~l!3z)}1&e4pe+R^I!i#-aa)Rm+@oR3L>~b_&?J0ksnb zF{aeTA#D>muOZjO%-MuXx!$m(p7Fm({aK@aTHAXKgK^>mvI%kAsY{w@U zJGewQMaAvlB^wZ$+fw6Tg{F6jOFD(pCz_V}{U8OT?9k7gr_?`J{TIHRQjL?Tokj=> zyVpg01GqX2joE@)c(7(*clK&egiz|jX$)JL_vh~!IiAyn+z-N~o#q7Q=N^Rshk4du z)o?&60RVGZ-?fhYb*&m_)XGh%u1PWNnXAIBtACaa*YKGMCB9Wtb5Z{e%2WI=l>h&N zvhPH)u%4Nvk)e>i{r@BzqGY6G`o0T?0#HHzxnv|bQjq|Wk2~-J`jMF!w5eODs%QGV z@m3YxR>qb<`v$?2-_L&! 
zok;(~`xdaVvemOUbNJ^Ace4Li;yy}YQU+55=_{%ew`LWPK6STdiZrhYAR|5z7%US3 zGKxGL=sfmd)03$5V#Rr`rQiA6kD1bP5Yb(}9tAwc1_`mJ1`}(L$5tayjRtznKyy zjY=(kh3iu2$`Xe8Uo5<>gW!e%N256{=3bloDU-R5x81zn3Q&TAi*7q{)o0jjJX) z4+H|b1naay++%9rkeA(v!e!}834!aTKC zqVcQrkkBFJOH(hYl4yu;N7=D$4EP0Gg&WFQm(ILd+&iuj z(Kv)g=T98Ltj$LFQ4?0zc3GlK8?XbU`Ks6wDMQu?`r4U{r_DA`S_nZC2eE2*^3<%Z zlMwoCcbPf4VnF0R-W2n6;Kt|cOs!_th)ydxV48bSsrW`e6&55SVwsBJX0aPWfcLz_ zV5BLKvl=W4k%8K?7e;WlM5DR{UChskr@F4G2^&P--?YQ$L~!IB3v!t77Hf@n=$XX5DSA*8w)5UZOdT^nAUeyqGf_ijpXl#3N2Ce^#fN!@ zO8Obw>3}Vn?6h`v-yz3nrYu8J>7Va+rhCLF4gq<~*vKz-^LzIER1`TJ&(@QFHuZza zjk<;3DNBHs1ae1$G>#{DKwfzMOwKJCaYMD-u^>OBp~ zU1aMWlZ}DjseQPDhu!r3MEfi38^#d39?_~auGjQ-Va4dzx|x@Es)cGYH2`DHqDkC6 zI;sTZlI%Xi3}Ogf_)@k4#aJ;wC+I>56LMrvM}7mg(>|MH|0sfNEz21;eP>Xrz6V|X zcUOS_A6&t=aJ8e6fuo+j<-eSOloaPr|8I{A1Rjn;lC0>gew@v(A;t7KZma~SR!6hD zO2y?H!tgV1MvUZ6`_OAE=Lm`i1r0vFgNBhbT3Or4;9JeB%1)g<4TS>`eyFuWLT==i zh{bFw2b>4LoCCc#XA(Bh^+C4HK8@TaaCEwkS7Y8$%}e=0(ke;YB*=4o9!A!pa51Trx&NART*e zGi?abI0t<@b9zFc`ru;HSSpH11H;(m>t16`S#vux>99s)G(sR1TW>Qt`*ICqX$nFh zQc-zWLzoN#AVeT2AaJ@`I))mC8hW}PX3n5LbHQctMKk@NzSr}uK5nw|kox`aTmE)^ z{}#CT_Z|9wYp_YmX9}1~NMD<9qN>UACmATBa`aqlrd2WGMX>5r7-ZB)uQjI#EJQVi zcJ%-aO^uCR>bM2b-p8c{(Ts}2uosb1=oOCv-nTLK@0*)O5MrjodUfqj8xGz_ncLh) znx8LE&Af7c*w2c(cZM_xO$K5T8B0f!78ER%CKou=6(M-QEGSnvBgRhSKlAu?z1YX$ zC_pF@fD$1VhDjUMsEtk2z6UE<6)_@e0V;XSGo7K7hBifk1A9!^X$dhSf$)`5@Pn%m z`Z5DL-8c&4j!4CvEY+|Wv-ukHGYT_NQZHxInU>6{`2|0lr_G!Mo0^eDKSd%Qrw68u zuSmSAkv8!FFzcP9SK%(#v-hKzhBqZW{JET$=s#`8XChmHOqx-qDbQzZBAcia%Vl@x zooRwcjtUjsyx5<*M@PVbK$S5!-XkdGuVCT zf@8_(XsJ^O*3FXZ2C5(v;3br+keZMk#I{WXxJbP*#2Y6py_9g8bERvnY!eVRIcKkz#1EE-86^0mnVl07$Q&=*j^KD5=b{H8*nSJPLFGa@L+TbSW z3LKoNn2d7}ZIO?SS;{vQ+6vc@Oe{_kL>UYYL#~FBF%xX+da|_?S!v8As%ai$NMmUX zjS3XrXGod$ei7uE)PtyF0WO|* z6!BZDb_#+3J8a_k_!^vEkO9@1*DctD@sdqdBNr6ny*IP5g_2xBAfp;`gWB2&4Yx01 z^bwm+nF~wvrX!hBvoek)J#)6??=Wl`-Okc&rC^J@&Y}z{?HXxKW4Q|$Pl+FQnz_X- z&`geGA<(sC;e+&Y?9<}?CjQa*eyB8P0;A$Vj_OHy;Uc?EV_k5Gq5@EP-~dCbsv8SF 
zY-)!EYgg2^RyxelMLg#YTb7~>fLm4oJqI2qY-Tsdw{~pqA$n^8v}fSIvAVD4%wdra z>~m9Fjy1aKZwiPbEUPV+eLzF|`&!GA+X-yl8fEni+Tg0KGlQ1SZ zIz#BiPWKnVXjzyutFUVm?z><%0GN*yB+ghE~te=M-0@9FKPQk=IuH;E!@QdyUuw zuACj6AvcW?w=m~|=;;cs1dE0D$n+e>7g|>tpD?fI@U`W#FJ}2?(D~=&u}vu?3>H2{tb6OKd_}6Z<;j$ zt<^p?d)lwvwjX@(tdsrVZoBTyZDTD*JC#S-rcW<~Iz-mac=5Jxk8hu4=X@rMx?60l zQFEJLk5N7EQ1wDK~%d76hQEjq+r36IBk19)&7G^L@$i zp4D+6V1T==#v7;-X;!TV2fFvcqzZ}RQ=b=cjXD_H6Xj96Cnlykk1{=`C(f^UdA<7r z%q$t9ah1l~lE_onAb1%k_9T;4DxIWgm833#432r`c+At{m1&Hd>_xb2UUu}3$wINb z@#@g~$}z_Q{h(F_=GiEJdyU;GNU)AUf1EPsL7TTClq@s}WsxPL&C$ECsAzho^tPL0 z*@DVYbyehIi~rDeq%alI;8cfG+zHUBbVu_q*FMZxOu%BT2b_kJkLplfTa?uD*@m*4DnJYd34;S%btp|(`)t((9U5El-5$PDE7_@Oc-$UmcqK2~;$~B%PG&nF zGW#R{n25A-UO}5jS9EaUuwUajWiFVO46VOKH&xd(4foEVJgkR&ryKYQxYsLKJLS+b zG{H#7+Ui_)hH@++igfPx z)r5(KPN)EfXq<8X5fCjIYz%K9+~EtoGT zOr7*=hw5jkw8n>jlpcMV=Gy7MLG#kzg64nMtp6|8k~I6CDD^#`^`FH@rxKdHyJZQhQa z&M>XG^hHyX8)-CODNYk$g?Z+fHByk5YK_DA0_DmcDXJ2^C32dHuQVfB@=9(!$@K&2 z+NWvKChB=@KWGkfn~GbT9c+Pm##&;nLfW}G=qO>MBV8Qk52Pq{V zsuXsm9Y!O{jD{(%1kk>|Tv4nqxAKC(@$*Ax_?A(|J3!JWHtU$!2XohXsRkwdU_+qDV5b0%ZvYjiepZaOd#(?o#v_QfThvZNGDH9XWj(~I8H05UUH!YysCZN?1sH04tfqF^>PS9s1QMvFs;Eiq zuk6^0mM*Sn>r;89C?ftm1h&2kh=cT&49SyVOa$Yv^T$RI6thj|VnRdMODwmF&7PyS zysJe;ObSz%U|iD3uad1kqWZFtZB+5KRF=6Cy8ezMzCtXwV+^&{tH-lqkY1i!ud=ah zZ~=7vfDlQ)r@?03V&e=yc8D&udppbe=1IFu@-93TO|QNkfk^TJvb*_Ex8Rlf)%0^Z z;S~ho+38UFM;LvA!*%`X+_^y!XdZ({R*i&4%`aUEmsNz+h;28j0oCx?Uw;t)iZKi( z2z3I#_cQ#PukqjS9lY;d|L4Hq|7-XE+|z$reU!3=0-_3Z>KWOg!B1Zk_4qCekZEyA(fk;QAC1Y6)QyqW9Q0q?YWY~&Or8mo2q?z zmMRwWdH5DO&cJRiNw2P%7{1N5l5ft`jY_7Yvgjw!-9YlSgg?)em5Q%~((zzq*Nd$+D!5J`g<5cfBn^^|+T>695QUk%OI~CfN362*;PLP`=AWR}bai8i> zEinn{Cej3mj4lavb`|PJ6^J@CNo6!|sj5g=hDS3x=Ac#lYcAvtdbXi6Zx@h)@|tD~ zk?d!vAM$cX;>P`-TThWxS_-X*i`hoYz7R^ta;?+J5ZEkp$}XbvNl8QdveXaZO51|| zkhA=G2->4ndL_^B1mtJnoG?%E@W=+W>-6M&qa!8iHje?s z0d{bq4oj?*yOiEx)ju8h3?ZllF|yn&A;_?BW3(ylvfwrDo;2TFEi*;1ngvleC7%IB^;;4)mE*S{Z6M3DQUz?tBQWh}Bg=cZ9_Vk) zoaw6yOk_o^`N@I)PMsVUOMRS5HjLjP%`C-sUb;@b2vXQqFdY8ibffL!*X3J4i}z3k 
zAJe+9JgUYfhkrk*e-2UZc4Ee002fCK23=Fkv^}AAJE@M7^Ni)dzGuxFF^NH|0{pckJcMk6C4}oH^t= zuAwP7@CRzEOIVs$8iLC4oRUO7=<l^e|JXwpoM7d4Kp{`6zp`x234Q%Dvno2P@~zBm^*Ar zG-|SZOq>$E+R$gOVYnddA1tV>t6MfV`x=$|8@}tNz1ZH|+a#Y?Q0b~M{X#MOeIZuo zdHFt-Nk$~^pL1^REO&w}N+>rxceyLrM__Gl27Y%&()m6Ibj#FfP{PJHlUk*j)|*0Q zPugPiQM|~ekm6D}uS7#v#}PH<*9qe9Apfj5?T;S4ll`{{>i=}8dBsXe z0MWw*Z<|%DJF8SYRH*aPfHZn5(rP2|LHf&fG~DA@RL)wV#BYZ4O2{JpW8MRs+Mob3 zP{yj*mfKcZhfhZregyV6Ig2$+*=}zv)UP9A!m5DKYC~F)%HjR5um)rl}TA0Bekroo`V@heu=bo=aCeW+XnjaMF;lcdt-J^7Po^&G)|@;H9h z!3W9G8EF3xYMcO^F7zIVA)OdmV`B2r=nj8+bPM?WP<}RedmmYf5ANZvg%Jd0l~1hE3dkzeic(~2nb6H0tIx{5`0Q^RUW5Fuhrl@jiuB#i z;{m3jgFv&Xqj~)tKSuJgVXA%9|vn3{H z4ql)Hmx%`O=XIMm`Wu*4ZeHsH3%l?9vJ?NHBGx3!OYN>TZwyUxlMScWZrQ={2TY`l zG=&t2O5Q(w5cX8E^%6Y8fC3?`2)L{X1*D&8THzz9$men^M^=+P=A*bhcB4C^+>D@I)`y}|QU zrw#5QJYPtFqKQYOi4sCI<{}TIn3yW?S`ytj4vl?%tz;I=30lp+LWeZ^c;aDQoz zHJRlS7+I_r7GFW2<^Fo?U9chxLcuSj@FnurxiuVf1uKmurkZ=$H5AZ3`5$M({!3 z5F{Y*`npl9-^nyB^%4(m6oHSQe+r6ObYlo-pDW=S|8n8=u%>z{@2bKC`nvL+^wD_ji)A`?dekbIm0eKC9v=jTXSPy1m02v z13qnVA7z(Sp}o3R`US`AEKK^`B7B5K9>WFG&y+q`gMu9!`PA4n%dnFZBjMq2Rfe@L zE}Rs{hBOXiLuIES0}bNA7C43LP`huET8p391%+K5%>Z|(I9wz&X)+PQ9~ikfZTJ|g zsT^U`S)!&;1`V@a!Dn*iYlOpY zVgH%NWRb{8;f3=QimKhBtb_R$xoDpRLpy9UqZgp4UL%ik>0>HB0&5F4XF4m*=S;BX zx~9I#N`27&*#W%r8K#_JRKcWmvoJ+GW09d}1;Jpdt05e_H_kFMwm7xp!Ia7{T1J48 zn(6$w8?3B2$fK8rdo4pMBv$`oy`oHzFO^;ls<6!f52N`vQ%DYj!LoDa&%}e?Ag~pn znI{!cBiMu@SC5ny0Ws@nF`@P4n_MuRe(j(9%A|Zrmuu?W%LR)J(IMkk;?1T zq`^G1`-BFgcjW%uld&5uxhY`3gT}~g03lR(j>l%gykH@=b z8^ahqVIs=RYwkCtG({nkux3dULM|Tm;v{x|=DNYw{Gx~$2{dR@TOLT0I^L&syRJ)| zpDjB{%4Alk=S^N)OiZs6zNo%dg4OPoSy^@(^;c#@TWg@eL5ji6P>Ta8PWgc5O}V{+ zT1|`!j|Wa$9+D&pqvCdQpD5rx2ya?LC~|W#0)yA*rc;0?a@%a60Xrc7noII>V`^Ou zwFo}Zv)O&xz4gJf)P_ghn)BC$`E5f+NCJP{W4zZtN`8{`q@?}5t z+Zf+fg@Q@z0v2ns*eaIk>wC%+{HbM3dd1(4+RMW zCUX}zVZemav;Z2d0TVyXK7cN_&HZ)iD)ysbfY;^gN*$y3UF@7`x%Wm%gCB!_Jdr=K zgv{1OWpIaZ_y@hCR7Q_%k#5LyTE+K6K+E6g*k$J< zPCoKZcLceSv&!;3lUrbjbr>z5^R5M;@veEGv>)W72&S^oa&2?lP@O{P>R;)mH0J}# 
zCNs0WMfey+^tnVEBr%rP?9_83P3owSu$xlWb&q<19= zXDF15aZ{FMaM@qh*G{<9-ib=uZU0_M@r2SDtNnFvHNnK1%O|R77p=Ld<(8Jf5_Edn zA6^(<(^-%P!IKuZc&dq$1Qr;FSCEFQ4s@65N7XoK?6USp2cN%mkJMMa`wEQ9?6x)P z0O(-3h&5dflC%*nzBUB)6eMHR7%@A&1_{1m>->b?Q*@4n7vuPZBl{vn3liDLfO`sNV(Z0f`MLp!Oh{;(lSb8gd&J2~Q z4In%3SAUSaC8}z^CJI{oVvy>EoCL2mNX$>jO>LEYOxFAfw^{0rS_8Mm(yvVg4b$V&b4Vy3lFfjLnZn36J&s#w0oN9chUq1? zT=@R}iF8>42CATRfc7T4~|dL&vO-9-H&RHIdk zgR|s_q1eAbN$|DdXWR3plteyBRlog}=vGvYLI{HfcHA1G;*>wM%Fg4Tely*`y`|#)EmFKhjk7|X5 zMYyUM8_GSH{=O%G-_-lur>8%vYC9L`!Y_Lu*=Wn3E(N_f3kJQ0Xrk9rEH)WQsZ3jM zNVSZ1US(%xI|#HsXN`uvt7FTxFHTX5nr~a2ZESyLdP^%ARrj4fB@$aPdPQy?L32D& zZA_(Rt{48=zi-O{_~;3ANamjL9WjgAqJb#rEUbF~`zW&6=Y#oGTywaWp6EXNs(ch? z1M9smzV|BQ_j)|=c%#Fq7VQ&g-tGi2_{n>t>Z012nFCW8Il;QI&(TF-^`tnGXXlk^ciQG^Pt(nSO;%(AzfoAuKCLe1w>EcJs{14*~pzHJR^^hRq!|}m#tFcS7kerPJ=GT%_q)Tzx|oeio3$N zC1ixs?aQ2>7urZ_;I<;9qc62g_+343W4{~r~|e+^6iH~C8SYgos9=I1+c{FniKOgM!v4-eSzo?J{XP%H0u zQ5J-5pD>zXaeCU+(Ay0g6?c+`i@=Tw{#y)?tzdx1u;cO9a(-&!?k{z0V_9Tr=S znwxW-y4IsX50dJO%~ht$T?%s<1@lHi{@WWuMWUw!y1jEf2U1i>v#E8FDpPa6_VjNa zEy7PY-4tM*k3nV`Avuje`0rQqVgWeKKpX*y!ra`{8~CZ(bWMJ_)#F?+y{5E)H@Lg~ z1mQ2jv1DkhdS6-{-#a&rGFjh>+z+dgfitTx7%8h*8Q#-EJzjWcOuexm7My)k;0dft z>)tzEgt=G8>QI1~`PG)10MnxwrCe27HY{CVG6srJpL&z-ultK>VY5_twQ(Yb8YI-g zG&fU?k@WJxXaeF#eD=XK5^n1;1UIs(9;Hu!kv3crsrd(@mA{~!?sN&6y2Ovf@zjiY zQV93)kRKGY2_t3K7*C~XW&$B*WFp{a$KD7ZUN% z#lX$2?z16dX!80VDH6jCm$}GXZAn!+HvttbS2N`BTp0fO zi?qljwalhM*qIv}wk`Zpki_mb*}t2iqlqY>D6Kkh2&5O;d5 z+ZHZ!vBCNk_#vR|T*gwEQ39-rPX%H6q;w5WQd>-M*0=JRPp;$vnIP71h;L>j;}(pF zAmZjlDT|>ZQ}az})(JMIeKwK9(1ymTYxyzpzt_7F$TT;&5;PVxp_PiZ(eh6j=|M%4 zN+1&j&t`DYX%UlL=D+9hg56v!@{+&$C?0%`K3W}_)EJJ3-nRO+D&4HMj3{Viw!V&; zHw~8Lh)7i9uj#JMXeUD;4}OUlhRL)9XjE2h@#3BPdi)I=2_Gg`i?5>;eY*?jJ4AGf zi{pF%LD6{?9n^5U$P$9IX^BBq3x^CEjIS9wbxbLcq=F^H3@@hxsF~%^1xm?h$$WMJ zAy->G7x1TZpo8Q}5IX;j=ZaY8WX^_2tVujfBXt3K$Zz1(KoJWv+P-etHg%CThNPU* zhr21y8dApk^ptFzm^OEAY<>XA;@`{*vxJfl_T$#EJ2>UyLd?B8riP0PmN*Jpy zJ!J}>{lb&e%7YO4Ak;W)!T17wQ4XN)exy@gZFu;0Sy{sS!#+m6uB1^=Fc=8j-P8{8 
zBSmHFUKqKko{SPSx!x4lZq>7oAZM2{L3j?~Akb5M90jaVt`NP6D1DhF9~%OP$1rNKNCs-IrU+`Anh zWG#_MQ-AS|e1U((C)c?d^!1);U}@=Rqn&+Ft98Xm$_sw|28QZSp}cntf&<|xptJP3 zDQd|^hq1<8pY8SRDQV9;{TJ1VlUL>VH-4SR5`NuZT)RWhGW z#G}bCf}iTIjx6Z@9}Mc*PK5; z%&k>rjNBPqUs>kGdf>Frid%Z$^y<4b=rkKmGTuYZH@|9i?uJI=?+wJfjqf_uV)S_-^#6h z=m&N3H{TLK^Fs^TblY_bp)u+#i0ab9X9X)isjQbA6nEvL2emhvJhEIVa%oXDuc?yn zxK=hyTum^g>(X&oH@8^(XyTbSRM8ORx@Rf|3=lhT{{%q2jw0W7-T>uTqkJaRn}a60 z@X)eK{J_1lh1}KKLrD`sBGHp)apl}$#%&(WG{T|kr<@e|{849j4Q;n6VUPnTObn&f z(+=zreJgc9W;UNbNx7knmnp`WVK&Pbh{u~$^lnJh&jGQ6r8~W&d0$pWgrlxEkWCDXHR{W4y zrMiAGsBMfxaHILSo5#{%Fxoh)cNe)siC~ig3@BR&8R*6YZa>c`ZoS=1S|s*6b?@3( z_7*!H!RGN{3G%owTx<}3aG!An<9d$$1TUtZnNaArDbsLTeF6a5z?394uY4Lind4S| z_M=1hO2#(p3`6H5B?PXmU41*<6lNz2*}3+?Z#ZIfRgLDDkM>+#6R`Pt>Yg%O_Rr6^ za;=&kd{EH`+le$73&Z&>E8n*1t_>?+=ySLxTA`jMneeeQoWS#bN#OIC2C-Tv{cv}YdPm#5iwUt7#_PobAdYctsowv&__SyvjsvDeRrs-F>l zGen}jFVcA_QRHz?nZw7lhg^E7afURDq$AP4s0d-?f4E0J_V~caUo(BaQAd}iLm88=3b+o4NRNxjNsYQN}fE8Q@GjmoP);F;S=VU5QJN&mm`+8 zW%T1JkaebH-bejCyLyk%V0+b0va&+}wi2ih+n0d;Nkss!oeF22#C9Bi$DKP>oE?#WQ=J%U~le}DSLJ9q@;l{|N= z_+q~LARH?!u&VQV4z`9Zvgo990dzsF5QJ$07eIm5IrFO{^yUbJvna3FE`b z*X?IdT`&~T`#IQCy9w?(|LZYV23GGd8PKVz%Lx?T1i=h7{(@HEZ>?hfI|eohYk*VB zzR)MHZ*pD6;G4hsz4Q6Fv%N&T#W(%uet^CGJide@w8vZC+?|6C6{~slJUcIRKL)*n zRT0byKWoMB#X;t^vZ3kS;O-HtCQRHc@a_rYea=QKO|V2>gnxO?$d^B=6v^;PrQ~8I z2a9HrGnM0;61}sALxE2F$n-Lde+Az7;>_(PRV4a5*gG!gh;o&#C>l2=k6klAM0`azV}QbyidJW|ek!C7B%3@eKfan^NBI}oV?uB(Z$SC0iT^8(K$vDEab|RWj-_i4x@Jh{q)fe zQ#Wf_XVt2(YVdQ}z!XWMaR27XbHu;ZHO8E{g*AAH0qdz>wu`0No&!FW}{^M?Bn z!SqDQxWB){G@#i6eLpFRht3-N;9Qiq(&v>6=$+RuBlxX@-@|W~3Ot|FqWM4&O6=T> zdF!-!$iPpFbF5uR1%-Aj(qCZ;>#4|YeuLMpIYYiPJ$loLXd&^*vbj~cg!d-cG8(>Zv;NE_W+#28p6%Bnuie*7i`G%@u+Y*4tGoF2(l#K$uDBObIjY_m_hJOz` zc(}t%120h9Xm``sm^teQz*DZ_!AP2Tf~HfGQNXKAN*(5!p)mk0c#gblw&Yhwhv1!g z&O1o?Cc%amXoxewEsn=KsBj`_ExKO+E17m}nw~T{4*5<@8A9t#YcEQ1KMvt-I4eg_ z^94ph29>Juy74*wVDOe5%-E&=A^%A4Hjxu62vu^~)l@>h#Zti~FvYaPHt8T-8)%yF zvyAXPt#`o>8N|bA<-zCpnP+LyL+owo!pNiyajgeg{KIL+hgpdMkMVfq(Q>Lfvj%v= 
z$?f*1SsgX7p~)*_UPI!ZQ_@Xn0NR`z&N8O`5zS?UNP8OsW=q`dSNLb&Tw8*uo7$)@ zTJ9ybkwJE|;OAJ7M?B;tgXpW__yl$Ts!8oGm!4>6Mg#4atK^bTXjWhRBL_1u4l}qp zb%Y{y^7^0Bfkd-8vryz)2v5aG$}6b~z4kI$Bc|w+h32&zej`hMWi5#m2~|JKt}G8} z$bX|uxq0y&tMNs?70fu|%Xot~c(mC)(^6^Aeg&x=J!Z!6m zy(RW?|65`cOv6}i6O1%nw)N>1%I&QsHtW)13CJfiG)Hi%gTe+R+EG_lD0gVhLFl=Fdl;?l z6wB2_Q3Lwyer9H711*{^r)H+Uq>sW^$_S3! zP6W4PVUDTxom_%>)?j7WDkZfN7&c@F9qE(~S(am-f4OoCwH*(LH4KXA;?N>?R&yH5n z*?S1ZRpwEx`B0wODbEEK>pjbS`gCq)&!kuQ2J@xX0nbcjmEMJT>4?Zk?q>$I~uo??A&Ulo!`E|U z$1(7+Pg`cgX*>i^GFY&oZkn8RWv<{@-LhpC%h2!~V+6`$kQ*i0{Lsxm(#20EQCuUo zW2w-9jfX|q>a|v78{Nq=!lkQ z7+x+VQB-D0aITwaS&oJKg#~rqOGhtEWY8_ELShgcLbn^?u-985sL`5yW4GBCx!ZA( z!q6`_r$&=-nI{`Vhk=xvx(3$>p<|9thSAsUt{5|E;Z$7;IY7ieAh{; ztl^}B=n_z&Z{fIS-{-+GDIg28zM}j+U)$MhaG#-Cs})n{A>X@BRac{W=|vG0{=_^y zJVb$py@_0kdd&%Kt8Y|f3v<_0M#qa{E86nmv*T~7z{26-O=@ogZw=Rp@3Xi;u8z{w zQktO4I!M8AVLF;Vdo7P1OZ^rhX{=~8207zk_i;`23aODK{Ch8GObzMxhQUn2IdbiL zC2g5xJMZvo-0m(__=h`X=(D1b;~4NNX}X?<$#evx7o<1bE_a;^pe!YWdzCSZ7T%Vn zb{Bh(Wxb9!LOAzqw&nBz48pcS<3WO?Dak?EeYqD;4xhU#UqDyv_9x8q+BdjeYbe_b zi*3!Q^0$5j_0uC)$Y08?vq4-RBiG-(Dg~xP&&^OFjOOpTs&=rXX!H-=nt^s7NPn*X z>^Wa5{C3fG@okHdYPHo$e9aZx8F@C9Z3c0f?M58BM#^wX7+@7YlFY~XMG8O%H20$W zSHK*)K}^Mp%lM=(o=E6|UUf0>r@teVv_bMh?0Wxw6tM+oR}uxT`k9BtP&sp#S0n+y z25c1``#fiT1R-L?_4bV6nM8FCZK6Bon~evm<|rw#n1YiZ`d!>zU?mXjP{4eAZq zT)%Z5&;wz=BOEAFaRY9C_c0#gW5G@W42$Em9fUO5g$LxzSp(@U{B{(T0{ya~hXv^z zZrSd#;F(zb*cgGEK_)iD;Y;uPHrqsICppMG{>D3C@CvM>h7f(GYPY@o@@Wax?i*8w z%%Jg0?=NhB#}0)-R6C@5yWoY@Y2421gYieJ#%?@&(32&;B+j82&!iW-j*;6Fts5Kz z;>%w66W63uRxCL)=@Q~bOfyRlcx^cw_RJ-a&Yog?g268!1KR$RwlhfC zg=uW%mskT#V*RUs8r2Nj7}M{sGh)*9eQ2M#Mdu6zwUnXV=+lJE5&d07!S<_T2lZ(* z_4=E$e5+G=0S@sM+^Rj=I~x8xsHz|Q2z#CY)*U~iz@9&ISREaxY6r91MSpbbqSPl> zM`(75(hdc8j%rb)$a;Ki_uDf;lX2VQ;&u7rbgJlEQPk!yUz;Pb*F=y?F66o{4vsMx z48x=Er8tTXFCHZznVjVD9O(NL1Yl2cQ=_F4?Cvdx7;eTVEzGfo{q3M9CCPT7r;Zhr z1WEc*R+Rfm3xQq{X*h==D5VyDY~qsR>1F{^z`5+Lx2TE}*jW3{io}d>in<@?e-gJP zrpF}!zLb|wzRECvO7WfCaZZ~oVZ 
z#4iKDzdHQ>GI6Ahi0VJ~ef{`4SNz??fAiY}3?06RZc3(x&i^Vj8A@>?c7M^+DMBXE z6KAG=06zjIM2tlEO=pi0z&jsk?ENNt`^G)e5;MJ|w|UXI{`m0R#=WH*L9H8OK;G12 zAXWzyL?de`w6^82bT3sdN+Bh5H)L2<<3ehv3w-fK+vnp}!Bs`H z3@@^viz0~++t!ruuD@gh)-IQAtKS6X9}bB;X*m6ZzyA5P{&L9ZKOFLxZpXi$xrDW@ zfvJ`8|8SBDR8&_+R7U!U3MCo{R(tQswn3krrMMKBi`WkWv5m(fNMvDfRJjxVQ=5!_=X0 zk(8XA>GT~Z8n37S<)N8N_&(>M8-+S9#}zWUju^g~@aDI_sv4No&YZt44+afG zMG+t`H9-&8JB-#7LUD&3h{CH5bsmDr5|xC=5){CvzCx`ExnBuAs!9JdEQycc++cjC z?s$d1nlk9!?_4(EE>&?Zwg};eKu8ix^n8HND$}{IZuh~ zuyV<(n}?=N@2DF%u;n4mF|$%tJw?+LxlzUV*J$!W)Sz+$QHSoenH8L!5sX)+T0KQ5 zDYk}YmxUQ@7J~JWIN#G2d{5hWI?HI;@coQLhN~v9^H!1#{a`RvVf3H6lKZ zvAfAJS08Yae}>tW^!neHrBfv)(&z zi3NKZZdkzHnGsgaA z;SAAWN=@H>;)tzAyg~z9nCnZZaWt(v1;ZOd9Ffy0J3_+b6wXD5Ty@zqNkdw)=mCTx zS{ExqN%y0|wmbP1RfhD9_?D}#vcq%MLLEIx&SL`@x1QNOO@_882TRx5iFC>e@2WY6 zn)+sM4;R;qe~|d<(caZDK<5%Mr&k*CI~p@@j@ALv6xP%}w(dqgeumw#Yce90TBzNH6JYiCL9uzAAg9*QX(XPBWnJn`?}&3wy&bR_Xdgc}JaVa+Qwn zPJI5)9ux_Vs@iXV>UdPqi}Y20iFay#=|%k&1L-n@|CaZV|Ctmltc|UyjZKa0A06sz zBoy>x^qwaAzX3t(tbHrO=%U2ac?Jxi^!|9$MFY;)Ir0Ytg?R&&`$YfnFa6X(fg}Ae z&^6H6(whPx`@Rnj8WjkYheeNriGpQu0GDLPN~^1j817RS9}={k4`|u3BTA2firUgd z&fk{9GWjT;Rvtyz46SxwQ3-qL3uBxJRbn%Z3|{CXZpvZNBkVV^~vM{Q7d zqc^Z@s#Sm4%7|tVFcx0&AfNf8%*6_O@uR4^o!UtAWQH<%Rh>!lea&vSkhe{Xz_mG< zIP}7Hok^0$!}@;QTE4QH8irHlQG24_O+6RTpXtw3+MuGWdi&**zQ2j({xfq>{L^c4 zh8BP2jz8Pw|Hvi(kGdKe|EJt?(5K`Y3!M|c7@3Tk3BKid!U96GV4{pbbp=6enOy5C zOZnDNV=QdQpkKPe_fXKST)293L_RE_pX?jU-kq#xFR#%wn=*s&QRolEu1E$}al)}8 zYd;Aho=HOnPzlCCTw}na9|)$q(X6DOKrezgFxsnw5x0%mPN3AizmeNgKt-YgkV&DR zG}#a5-0#NH=H{F7_;*h|_=#j3F7n4}TFt@r_8BK*HoM*DM{}~oY96-JKZ&$ItX*^cum?!DU#-u zInz3XA2k_#e@mNFV3UQ~IdF1RG1QBSfnb#5YN-)jZLlYxCYB7BmeX8mm*M&yWdv^q zNhb^B-oEwcc~-O`qmyYCMhFC{cy#yz%DF~e#F^M7NHf>0Ff?%`^u9$`-aI9cieNhB zFM9`(My0$3e)QZQFr*|dn9j5V%iG`>odMtS`yo*OtskZ}HW7(@P> zc>T35*#Q1LTDZ`lOHV7nRs433XqC3FWoFDsce^-QN%U^jWY^bCuL7YbMF`srWyInWddWOe=wHVomaU=lCF4_j#&&gg>5eOV zZG6WXNm(dGhdC-mAA6^pt$|F_B#KDgb{~lc%`?V%&-c-3_V+pxlY}qwc0DVi?x7cY 
zqPfQaIIcEKKUhU^*UQ%M9yp2@UA3@$;7Jmt7N{|2|&YTk2X^{5`%R>BAX3O?>pC!z2kB2f1KXBhmizBPGkI|sury;Nsio3A_5zpfLO zQtS{tJV+VIeHG7Oeu06BI=O)t0F=P^cq%|M)w;0(F#+q58{<8IE~K`IRG$4EzUW2= z_g);^dg0)@vQCb=Ehi^xrw_HR@a;@~{(oqr|6k$5iVBB+T=ro5|m!67VJ2mKBJgnf?{ zM(b`K1HiP2==y*&a1B_3<@)3HKysa>Ud_RYH+CPt;6tT#_@ zeDMl6;r1*nSD~@c8FbVAqV-YJ`}6t!n<=A>cuoUqtJBgQ;bP;P(c&%t=pr6a(c@U? zvV(;4lE=(uJxFd)gOA&y(G#byL*oLX4D&DSav!KUETjzhl#`{ZM&!tsq~Deg*y1@$ z*C0i+44&#M!csrh8UcOn4g*f8d_8z7F<+Y44d%|xNDdGJn`X~qLl2u*&g12j+p5Q| zQ{duO@2-iUXTPhC)(RzIa)GIKu2eZjH7}Frgvh}h(%nwNA+TW>h0Cb-AS3{>+ zac0XJAzOUJVC-8+bF%pq{JV957=aF?Eb131wh&NmQU>?GPw0{dBPF@OBhH3a@Ju}Z zrsQK=5N9S``HsGcb0N3rS5VOPR+_ILlx$GGA#3JlaQQ}k=bZn&wF>na@HCdnnkk#7 za+t-JPrb*Q%?M5;Yeg1-vJL?FMwEAGe!FzqOb%j1;vKOLcXyIC%j}F24>#NP+rH{Z z1(t9er4kq!FY!x|o`u0l+cm-NS^gWez4IKbj5eg~B4T^q1;4lb6Th2$Fmj_6(MMl7 z=B11U?I$D(GHoPX+x5p)P6e8EY_&Wim%b{Eae*LyCAUSh%#P~brXs;DdT0IG_X_~S zOz~+R;M0W1V^^@uYmlrPtL9oCW2|v|O5G5{h#Ehza@)n|_=zLc$U)|AWQ|CYuXbBC z@&&U#>04-G|zvZSmH*dzSDipf`b_%-2v{P(8>oPVq-_lx7BxG>INS0 z)GbSjz@9xu!xdM?K;`m*HRQksBRAP8I&65KT>NbC{6k3sq3RihAu-G#We_An_O1fDxFXJ%7_$A%xi9E`^VzUM0>cZJ)wS(<(|56?8G?4K&l&Rsn)^~4l$AerlO8Ov z7q+MP;b3#5yU#|aSoj#C3G!JQZX>2i#ITS|#tOq(Bc@f3z-Xhu!|p_7h!jKZ!ZW1k zKoeM%6y3Xf^l^%Ln*B#S z5;n-44*g0u0RLw&{;$)-pD@N(Bk`XVqQ4zSmH*&yB7a<-p=+riAfN+=grrC?BSq5V ziQ$>xfg`~|tJsQY=?obVPvr5Kmm^m;I%<IW{RXwG?K|jOf!xqSV}|liuG^AV5$T9NsBnFlx?VE;gMAYaOSOWOaN6 zA1?}0U`bNLh$@mSqZhO;URGU94|z-rRp^s=(IG{0WLl@0+EF&0?5?75pR{L?@w} zOC=5+(Ut4PBExH~MnG3jwO&6XK?z)NEu7Fm$zjg%M-S(L>$QM$Caf?5t@(mRI=wyyjTCNwpZaVPuaqMCRgbd3 zVL7tLaFmTiicZc#Sc#ObLl2a*5w8l`#Su(3eLP4}h%yY8WQfCsVS0}ZZw|HTLlB}a z!eFp|z1HFMa5#ujPTlgz8n_o$v?<8iRR|`Lu+An3#n_K)D+Ux*8Us$*?UL_!SWrpv zZz-Yi$NCYWQ)Xx~hP(-wbm{?zH5fganUb>ggAs)f{vO>UOc6Y%S>f`1*Yc?|^V^WAP2ggCFZ&38f-{dqGc5(tR_CQRB_bw@z)M{{sO5kHgN-E3h$=jY6UE< z{dulV`M&)sL=~z%5`!X8Nh(arfi-;|qWN%{?5Lb?w{6-qyKf8RH`m|1aPpNP|T78KAJZb^WT_}W`qDs?ud_%3qrp91X&zM1B8}7#*mw_sU)-vDo!JJ1Fv7ZrWxsQ74;`ac1#VunzsGhD@3iNe9Y7auY*DJb4*_qOig`+A&xEP+N`)h&S*!an!DgfL^40jQU%)DX#4*wAl2 
z5&7#3Ce{y!1n$Apx;Kzj`>I3gA#-x;T8$x1=^3j<-z0yaaSxVV(}$^EH7*9`5wZta zWJFT(x@102B>34E!&oiIUz;!2odzSqB^Dms{>i|SUfEO!fCe2;A*qQB zKUO;q(Vx(~Z+4@f0$l8W(C`GljQFEdc<7)%L4J@^zfp0wKXW__!>B@U{eF8Q`JiIg ziiM}Ut!lpEt;051Ez(`)l@hmRnXY9ckeq}Mg@s=QrR`rUN?fM{g^Wr)j;tRgw;uwY zI?>VEucxh3w>35Nov3^>OVLIq>+PgGq;i|L0W7{fQzD!bak}y3q8wZA{Z-aiEw+DV zBPyO-o(Rlu$L}w+@1vetIyCH;)Iq-HwB{_CfQx^A+o193hr}osFEH2m~PI%v?CtLx|jS&UXil8 z1YNx!rMPVWgjRHZsF3OxeJLyT;Mf!(ox5BKnzS!oa&n zCoXJGXq)ZCIo}6w9$9d>pjTX44ZE1B^Gn${f;*a)tTevYxZmA`6*lZCJ4t}%vUYsFvJpQYIIo3=NL z3bqX^VAVRh#vHQ~q6aU*>-dUS8E$0lTZVo+S9}MslzvlR=x6k>3E<*wJR-4)SOnB&!*>%~MYkZH>}MGnDV-I^o3%V)5E>x#H%2CIyGqN3Hn) z+Yk!_Ysn{DEGa_#xh?8k3TGBSOdUx8>Dms$h-`E~XPh0f~Rog{{ z?aD4p?CoO5x@nG>CfUV)`)kYHnmklGUsDLlbr-#vk1t$nct_*4fv=Og@5MMnQ%K!o ze2V|Dl(!TMX%qK*mmI>@k%6@gQoo1Yd3VC!1+{41h!MVDr0?Jk-~gD!QH61#f4VJ% z-cUn-^rDx(Y=9k4KAJWrSmF;S7uP9;g|73g`4I6ng{)u$Gy7 zkT0eUCigNC$r6|k=JO>nh$};HM_|KJ&w8`%<9UezhTl=|^wP*fO{8guf7*p!-lb9h zT5pgy4MkD5qyhV^6OH)^*TT$uLyP|ceM3`xb#QyhIe=MPd>YOSkLA|(SiphLN>ctB zbGn_!Xm9b7c+T40F7}T|10ZG!L7w%y(Oj$%(HHE<#*xB;+KjO>4MCcYJBzX^Y25ep zJdO;CykITp%4{!|gk<;N*@1+!dmc`qcfPE{o5>mE33N}=M0?-Ov@m^_Fe{ldzK^qw zy2oA}A>?Ev4225RJkSws@%oGg)D?q*5{0gtD9q)E@xN^2FwvndpuUKV;kyF$va@ z#F*tbYa!AfH)=DN%?%Dyg*z}^ji_`yD3;FJ?VhT)b-!_lG2-aEyJWa<4D5b_ZwO@> z7_aq)cgwFmLosm-5^lSix%gse6$B-_sBnzG)5P;vZ+Uc#7~No0?XYhObxd?Vf}md$ z$9jijXb;p=a(BN=c*43(40r-9ldQ{6ZwNVN#FZ-%Qp-c1xV-$trH+h`PonJO;Ej3#E1d|WMeQEe4|R4?S4Ih(ut{Bu<4*oA^OqbfH`@ON?%@3s-1#4B!heaJ zO9_FL{!8RMZ>FPTj^Iy)^TW4{jJdtIH-i6`I;YHGVF&z@I+vywfEV$kBlIr_DuPwf zG!-?9>NM9fwK9{{6E*V6V*0zpxj$SOup1tyMHrL>6eSd1*HF(;*GLzL9$gKfC@~1k zpSWGW{rzdo*MBenTK_wAMesjaC+BGOcfn0dnQkfnFW`;}3}scVv6Ak32O4$+)Wr;n z2rZ77$DPT%-4xHjROfn^;|ahk!4?<6yrFnGb!^Q2m5Xb0Lw5@%8EVm$kYQgjG9m9q zPPV)yR#u53w-tJTcH)`h5rVpTW9J8|xnZvk8qqO_Ch?>f7L6VV3Oy9AxGDwpd>CpZ z?KiP#Hf>tie8aQ&mD#cvoCH!WvRd7g)@d(^yfO(Z+@96MtXWpiR{ZQ^uGqH8Lg@RYy6vixw! 
zTdijbtwJ!Dh(D+~!ExRIK1gkysK9op#}3^Yths(~aBuB?j|Tw3kK*g&cL5_f!zYp` zs;@wpB&ZDBC@5bb{nbkRG^;8Fb}+1HCwfGapmZKL)o!{FM>~>`)SK!6~ctvLmwB5H~-~)Jg}qepJ_7?Wx+$*8qDj z2^#N`*!-2q0DjcS&Od7jr@N}@uU{@S`I`&>2{z;ZXUEz*e|4xhn$qyGzvs6gxCg2I-Qh?v7z(A)+EB4 z57!gmn{1&oF|R-wVDEd1{RF3d_Q!{-0Lgvzn8S}Gnk>H4Tb&1NWreTe|l<4OCM z#M(tT1-t;?qhG$1cF+KSl#HV$9j$Li+zQZZu57JU>6{1^BJ6GexP8@(TfA_?M#gob z3`^_6fd&RL{eHUMmUGX_0DHzbGrl*R^AG&y+T{n?J_UGJiPtMy)h@J=06B5fXZH4^ zp*;l$f&nEpThhEHk%UU&@eZcpO;>jf&t%D@fY;4hiHV&(J|}P{r>Eb@Z#Av;DsEo5 z5UwgjE}Ds21EqJh7XKe-?;PFfmUWF*Y}>Z&q+;8)Rk3Z`wv&oev2ELSQZaANIo+dg z_j%v`?svyX{(Z)8?6vk@m~+h~O{X99{oL@LhehCqcrsq@Lut|BIj9XslIW?6x#k@o z(VS9|*$u?)n)6xZT!tN0Zwh^jkoU)h!$Xkek~w$o{BmpGcvPPf_GqV4)+DtlujpD< zpJ;TT8+D}81G~n#(H<((qncNhydl_`jG=cZmicmJILlBq$0R2&$D$lu>0cKF4(VJq z=F75W|ArL(-&vROACN+}wa`0BcVsVxEgLbjDYn+I+BMNeyngad?vNO-A9MYQ2 zx#;W^%q2Bw$9w)++MYrzWU7JWQanRJn5PLI?kW_2q7Vl7NM1@m$?Z+A- z#X&Li$=Xm z+4cc6uV%`;u&?4uTcxUNj56KFosiOb92ZZZ_>$1Gif+~Hj@+3vuo2_fH*0Am+t>N| zmSNZ1CJSTe-Qv?dy#g67(qX|&k{D&t4FswC$K!vM10U%2Wyk3MYR7-+=lu`2^e-z0 zsXl9BEurwh0W)Eau?|ODgCvxO?fI%14et)JM*(1=iEbl<)5@GV#)6t-GYEG zlKgZ#+xy-n62)?-3>Qz9KN12&l{P ztn1!u_Ap}|g3aW$1w~E+G3ztZV?W#lmZPZ(3viItH~CIe%XoRRrjOFlLE3;VG(F{1 zEkbj}@!J=Z{Y?&iTRd6(ipz)J8VNRL=k6%2wWCUdE3VTQnu<-WB2rPb#;oQROWl&U z&on^6iYA@D*!pnQN8zeWC{=)z1A|hk#XI52<#my*uuPZF7J_Z#lA=zGvZ*Sg*^2Nb z>A==xPD>3{2t;F#l?*0yOExUbQ+jBvEa!bNfU4ESv#D_oqEVG`j@G36q^OkQIDI!A zK|ggVHzSvHBGV?+Q16s=ZrDhN+f#-{3=54?Fo6jFJ74M z@^uBOZ`Zlf9B_C}RK8&oG~X+gvr#0((K{H$?tp74CP5k|6AY+Iy3$7P-eZE&lm866 z!>YYtd?Yl!q!HCzA$+$H6_~XRRaR3K>ap_!MLqt>P;^vpoC(-QdqU;25pV+RPC1+T?uVt7J@_c*| z$+&7td=+-8Lh0VmkdSsP@8zzc$}o25Xsyc=$TaH!`WsMVmcemJY}wqyn7)b@>3uz- z*}Sx8jpUgQLn@OazOne=5V>8k&&!^8q3D;J$d@?$m%0wh{%PA-#czW4C0#c>(L}8@ zCPf?4z{l5$qfqf`6KJL^JHaR4uD4jJ_@wb=8F0M26yARM;Rkp#Z5LK0I*O*7MU9B;4D&{VDsYul7a2I(mH%X1r+_jO&qEoV)DylU_k_B(9*)M0qGx&mLheCf{X6T(#d z>Ad?Ib|;YYX8HUp*}>je3dR`wLnd{NKbkQS#${~W*iA0nj%nSIPk0J*JHBW#rNc@Z zYl%{OFGA=sLrn8vh(}&6mb;B5>~AbG1Yh>cTlfo6$4NK?lyoTVrO3XYL-H(sy{zQ 
zq3QZC!6+rzH+rtRV^6BsPmA^>{2WZ5_HpIT0f9;yx&A{nYk)NakaKDRM*03r!0IrT zaKk8e)$fwj<5`nxngDD%zMROKy)-0uQh~NKyG?lyi0)(t!q~Li=mWuPQ0P?po`i3M zgrvb-5-pa5Z-=YFJi_foz7)R{1xyG%TSGmPuNMJtcQ|+<*(Gm){KRtee-5bTj<{f# zQtj~Ow|;*2D(v(|xC)zJqTTHg^U^~&>NpZ8?Bv(%4>1PDiF*O$)tQkal*pQw15^<7 z&Ux%aBOI+0A%>SPSDFt!EgL=x*!=A&bg!f&OOl^_eJvEAG4?6!>5h<-BC<+kkrbsz ze7pkg{yhM9I@gq_Ww(hIu~4EiP}CT(ORn2cwvMx^9ol&Z@a$e$<-%^0#gC@a5QsG| zF6Dr}dVp2pB6nWK(Gac1N18owqdk(vF!l5MHzaJ)z06gkJvpZ zikD%me%o8;8n@e*edtl}?u{isi$H-9N_U8j_~z+e+A|`~DIq0hZ!Pj1-kQgc?Ab}; zyid_J-A9ZDe4FbH68?y~0_o)aI=C1>TQ zc}4KmlcKBhuJAc8g0~F>U$!geluq{|q7J*83jiP8?!i%Cms;-?VtkynaQjQ(5^l!x z(OAxC*%CKX-*UvBjvAL|{lU+N*^8VTG>e7p8Xb>!7@s4q@fw|)onCJ0j>o?U_L&#I zAnU$n2q^ze1^GYIIQ+jQ>4LUSHeZAoIei;rtAE6Q6>~={Rg4Zp^<-w;mFNU^J8QC& zU7_R+b|~{zcInV&kud6#+Py_dlzzgg)zEJ1ZB z>y;FRCi`u+Od!6e5r9h%retGHSYAJgbErnkl9MZz1a;G^pANQ}Fsllsvj()P@jXXYRh zfp{U(I(wmjvZaPn{7$;8`s8Ma=o>GQ8(s;OBo8J z$YoSJch1Gzk{wE|s$B`K0)lTWzLe+0orZ}7p-xhe=A}-_eGMEo7_HXOM;9mUd$Rm$ zg@!z)mdCw~l#iE-+rm-bR|z2?) zd4G=+b3`eG4$!#^)4X58_A*}Uo$zUM;gYo@%qhlIg)i4T)j(@u&4M4yDU_Bx`5sBI zNn&9FO-3bhAJ#xnhmW;1XV_{SZmX(hEt?+Mw0QhdwzfC;p#k_-#GhuJ){H;g|&0t3{9>M;yz5Q|6!qyPYOa z;)j1Y+qMjfsoms8;S{a)Ed37kfE#K`v? 
z92JnO0#x<30;3xf?IRgCs3>SRDx0aMbQi0rt>dKnbK0(L_-{Dl-55;zvi4G~&z)cE z)M^culWk0dIyWkIdF4eqC}<}wT-p2qBraV@956Tc#5N@Z0)_G|UAu_9VvV)M$Y7XQ z%q?Aq5qx63Q{(d02Q9mT;r56GK4#)GyYU>1@J!#Am~KWYi*}GEA7tZC3)5sQpBjjs zpq-@BCW)P-gDs!1dFkdmw--s8nd!Hs)I+=aCwl?&a|;YkLaS|H;%Dggs3>s<3>!#f zltD5mFN{O`Q=SVw3R3biR&^Gk0(vm^ebiZo?i{o87BDqxNXMw%5M{#v9~wZV=@o)Z zSK7dLSmm!kz=Q*-pngWIG_)4qA0~+R3^^FtxzY6YCLrz#8?35}D)$@~@z&;3T0Eg< ztKLX^dVDQsR`GhG#i6#wkfB`0DDSo+8NS*BqwEYp>(4_;&#P?ZQ?P@eV@z*-cU2D@ zziWuglWe2BP$ROpnJn=3&wLoS$b6@#PUYLqyG*g+GI3YE+m*-u!ZxC z8*5m_#VEoKnj7kZ_{sKdRQFj+fqgLqZHApUM*8Vv85R1<1KQFXz6fDf@Yj zs0)Bc3h8U#POxt#4t{sf%^(){Z}#&WCDo+`meOotY>dZiTil^=bnuXz0Y8gs>t1}- z4*SL)U5(^#2$W9lM+o=T-zXFddNR;^lf-nZuD&l zwqU~TVR8#Jh(5ogJMY?kWOvgdBlJ2PcSF5-$C-BZZ&i5ubPSff>$=`_?&Z9_ zLwQ`=o)7rc%9aT?D}=nW3tO@b6}6!%-VJw%b)2<2FM(Iic8j zAhdFkwzdmPIrBtGA*iI3gsxPBv{!|;wPjPs*`pXjw?r3e#YDCEsq*zxJxss#ai5E% z+X-RT^>K`|2#sIrL)YOGM7oSFEB6zPM-7NrEXh+fg}$PPUvVN^!8V~@e|Pv<<@?A2 z#-@sP4FJ0d$UBd{uA$i7hy4b^Es&?tUBW((dcAU}SFh5I)|WNWLx`*Ee87tcwB8TR zE+Z;(JdEVHEY3+J;Snrm6#0lTD8I@pmJwps*fJpei%z9g7+QB=33wp0mH|Q`0 z+AtqtbUiYxbLbqI$(b`PiD71o*n?qa(a2;_h>wWOU@#fvCLXVQ2?N%@!j}4g(&mc{ zelL**7&&ihgiI$5I~-Qx+k2aov43L<&ORvA6%^f?B!i(FjfjePqyb4pcY^B}*fvmP z(Mv-$twhlgkZMXIkWipZSPc4m)(8ZOq_VzHz^oAp3eC>E-56j?y&LZ6Mqw`|;)!D* zh|pWPpGx@6=3f3NmDqhn{-UU0RZ{f5(@}};89}3bKcx13+@c*gjDGZb9x+>^_f^oi z{XOZLG|W^7wMOF34DE`zODGVSQ*(qr=&Tq6UC}rUiM$8oTC}cWc%>bh1H>o3P4^Y= zkM;v##mEvP__uG*aR06M%=fqaL&(@f|7$(e-v<4NzsP@vmcllU_HGKsHeYiJ|0L^X zDQ$gixr6^eUFBkzB@a@eQsReS^H&m%lIxYMtu3AjjOV?x_NZ)W>9D*6+wR&==={DL zaI^BY72Kl=9~Ehjq>YJyT5d`x04?$7H&`=PO3{tFxR63#?(btczq7cE3eGV-acDkpJehdhm2>^mcH(SQ4 z=!dZVa&gKvYrnO2f}z)f2CuX`W~-Byl~gjJZ8<}FA*k-1+m@Se_kMcf%087rZJ8V+ z1&1LAoJ3^r!m`BCC{+v$=E$I6nM-M=_C`iQNLd)v3H2RlxQdb^u|d$n?_{&*xiban zR~n2B(u`{i>qjH%^s>_}^jil*MD4j*$UZPzyb18qcgG#GjbM`SZnZp^wa-vPl&UD*i4Rw!$n*%8Z*LW*lK!UuKr% z*h7AP>dau`%Q78CB)ra~XQPjhrq1<$T@Giue2@pjXlx2CY6(ZY)aS6t6WRoMp);!z zBSWyHa*Rh#8lrq>fELK^$7C^A81G|$B;qDDM_2{V-liD*YdS)$TMu3TtC6PnUkR(< 
z|1Z5+$zI>k_zwh2+Scfw*>;xFn!=Ym^CSQx?gmQ=w1jH`OR@ zkz(lSuP9x}_}0B4`gEh=HAo~-mfZrqVRn7LSkV>?k1&3Y9lw)$Jee!;czB(v+1ehm zu4EWLSC!H(yfEBYuR1;<=`2u&uFXFY8Q7K7nkKFJy*C4shXTh+Q#I_+O0zghVguZR z4*JkYbkQFrONVO3Pn+K}Fg4wtFeh!pB;+v5p^eZQH~BlCT(jPhs|3jp1k{Hx%@o{- zT!tS$j~)=5sUar@LV%!Ih&3(6rb=IEx;PhNg629eAWz!FLz#p6hovGFy4E2L8Vga4 zdTT>RpYQTq*qqkoCkXz-C@tC;DBpgu8Zxw9{sF4FS>ceoehztpCe-I%K`t6kyaS$K zf=%)h1ke~GFoc<)Pah7>Fzm7sIeKrdBd!VfiWd8cadI#=RxjaUeYCT)JQGT!KC_fU zBvZe$V}$8waqhgcNzA%iSRR*MoWWuolV72dHi<>AG)hNm7;OaT5_K?a1YUB?Q@*|{ z;xgoHogKou{xOebCe|-?-fdE4VVq2Z>mVY6bwe>Nh4Fy|>*h7M-qbLc+PYCrj8zc z=Xx$k5h9~$(9G8qu~e$sNp}w)R`D=}xzL4w7b@ZHKkiEEgv!(1uR-R~BDJF}YaEz-~x{t@GpYeH7K25@Yj#r^G9TQoqcX^S5}oW*FZUuf^t`gqVTDwQ$XP)`~uVZcUl!Pv_4=j43e~wR{M%< zdpoqdOWhq&@mmIeTw6Wt$8E1$JNI1Mu;4pYo@qrX+oJr(cNT69j~G@rPWb^X6j|N5 z*1OB4n0hHM8Owx3aS|@!WhC@G7V&xGMFO;6I*F$!CKc8M)9pgA zd0T(Pn$+6TAM{^LW1N56+4!HeF7^NC+xb`Bo26u_I41|s(+r|00H2tm3?SbRXrbh( z&@U>Pr^dgk!T*jp3gS;-7e9YGb)9n@i7IM>-NmsNW51qJ2vEp*8nS+Lxl(WXc=#GK zr3*g*!flGaS((~e!Ku8VRdZ~SUSp2ZH(}N)U+b3M)Kv0;b7=>WLfh$oYU6e1ohn7~OO#UO-4L=s$_*f@&P z@I843I4F;&B*U|_uCezbzscZ7Z~;d}B?igF;s7tZl|7qd2tnLyTO#?-y2*{i#`{^u zU*hNPh(x%2$`!2^uGvcX1dIOQt?1F%+jMy$`ny*GuqawpZ~11H#@2Oq)qa)E&Zt;FYzm}e9PBvEg{QDVzsCyh3E;eB zxt?taKJ)K`305)fD+eXOkXdLmomgygU8vJ76~3IOAQAQ$EH!U$rI!i|e!d!2G$di5 zP&C{yVZRCaiFi4UWFN#NsM;a^h^p4Zvz}syRt8Vu8uQ2!0#(}HoJ21jQ~=D_x)YCH zA&lqP!av6h%@m-Hfng*LI?Ho?X|pd)$oE~Q;X%-%ys@?0%V$1imKsZ|9)&u52~RYe z0GHVFS{wX5JU=o9W8Yu7b}aK2k>6W2I07(q(@P~Zj*)q2=WgiON)G#UBS1TZpHcjI%@ND`${e9bAJZ#@wPCLG3G}!dA)NxA6X>YfZ=?4Z z%+W{C#1X7xj4PDRcJGPxg*RVQ>s(Lo`B<+(aAi;qreK#`qfeM&$fD&uBQjxX0n;XY zqb*j1&kl5!Stg+cBeTR`XMmbCZUB41GG$Xh>(48p|G4EHFeFkO)!8?Ql%lMTE zg~yVc1{!xM?iRN%Vg(5IEwBbONEBvbV4%VcnQV=m20ts4F+XVB@y}Wg|_& z6RO=dj@NkJEua}p0q&bY63B0qf`QCo+Y~|QCC?ZJe1AgBI8`dVu3tBr;;V!8zcUwz z|8}GQkv4u2L;etl_$SI7C9f%q`L$zIWEy3iQoyoG0ay^3{^B%4JYz7>RWg)x;F`A^ z0$M%9h^~~cLcBs8?+?&^FU^nlxUV=vYSOrCER9Ka$_Fp57tIPPQKVKS4sDa1Cp5{0 zFbNOrj%g7`m~80r0!&9WRmKDSt z5_=qqT1XH@<#DAZ$>>QMf395 
z7!4McDOFHLuc(MwyMIX=vf60L)T)!^QoM@@$oN#9$yx-3oQzWi&Q-~U@I5tMPzU-R zx(mCn;ZUKp3>2L;PZmro z$Li;#^iF6&$l9qki2&6pGTY=K2B6;)HzNjNmMQgu)z&39zDa|dc$FV@sqMmPd_yWF z-DzVpj$3;`qJAu%nPu2^%*z9Vww0R&PbzzfWbE~rmWu0AeMUL*b>}o=6a@zAiahwA ze@>i2vE!D+e*u&S{}rI*{6E?FA141JjS@7rvigtWK-p3eOBvl8wpqGKb5&rD6@(f* z8J@Q6!JiZbMPqed4w+t~-KG~pyLDu+r@J?Gw27~BW>(9wBu-1k$k zH2o=#D>u5SFVdsi@1?D-cR!o)T`oT^F8+yIrp;{SHQ37WI`b7jswIDonRpMR3B#V- zHxlzT%ep2K?Gp=lz&)R)*3Cb+}l7|S2(12zB!vyKULTr7Tmz2br-V-UwlA=PIs zaUxq@5vi!at%{1me%U0Zl9-_c)dgTjz`IR`HWbQJ-EJfo0$trf{>llm0T$w|SSBOM zl<|tJ7x^a|r|ZZMPg69q+d4a+#4?3wC@+!Lh_4prqRCz`DIf-Gv$}QCG1*{>ia|b3 z0{WBPC|V0~-7J2R$pYDBLI{UVRT9ukAk~d~E3w(K^eh8A^a%w%NjK?tncXx@G0*d9 z2;^1fLBAplR)-pdY_3`K4xG?o>i* z+1V{nPBV#A!~!b~J@Sm4-9-7i!#WxP!i0rcs(e;& zz`BA#d~wUA-uD!T;bW^C&B~}NAxCg6n+oX!Vj$a{Cdx@171hfW=lLtgDAHl@umR&kAd6L)7A$ePsJn5v!fTBAQfGjJyM; zoZ+shCpN=gTir+DxrCi&DwPM#3wD^Dh{vig(T28a_rvE{#t$q~li+I#8!XFt6(H_? z>7AK|)*!|%DjXH(+`7ZOV+keQw~Iv+k|Qsn(lULFThoSfn$DvDB|+r%R9rleq)Kk$ zOJYN$wsmQWOi>UGL>4N3N~%1pot?PRQO1%{-8~QbY$cW8)HUTeGKk`OeV;6NEa#9~ zUIVfHTlNhC)=q4R#VPyU(+Gp!1Fnv)MBGx@&RzkB1<>@)z4vl+jdH)q?P(6f zG=td1WTzsD-4Q!NQYRlK-ojQYljkvA>;2kRQdc{&qR94!&|h&@Oh=XxHu?HzyZb_l}Hf(7;-LLjYQr#@)+|mr{tA8N_^Id`ph^;%nSCP5VYK z{zT!QwTmxn2spPsv5io&Ny&Kru1U|akiExx+7h7+TYJTTSF8~>YhLT>%h=R=2Z!&r zhH}|tzJ_y2eI7W#-3@i=jnF_RGz;Cd(b=Rs3G(N(Txz4rX^s& z`x{sNHH6F!0chkXt7c)100a6Wvxsvy9tSR8H|O|zvK0jv9Nb?VK1;JVA5Fx-AsRCK zh<|TBUb=L>($M*QI$X&r<)U?gpKFY-nviDeFkC!WV&E-GQ=MV%se6nvtN6nM#tD0= z$7r_l;B~RLnT;F!m%=Zrd)my3ncjw!12-hbFdGyz0m2X=?gyLVcHyJFf>4nYojNXF3AP_fB{^l{n-&H z4-Z4)j!XnLBewWkWDt&48pl9exK=z@zP0Ivg}I;zReG6X8gs3G*l-DT5cYf)UP!EN zjp(OzSaprjZl5EL5Lh3d>1i?(UN5ho3Eb4!sL&vZnGfNP-vyi@9VbY7I3Tutay3^I zK`kse@LZ%{H=sP8`9zYze9$Gm0sHX&+>hR$sZMjXqDh5G34L;g9 z&=|=qHp}9DEbI8NA$Gv`fcK8I-a}(oppA=Ia5w717(^ySu4A<2$?CYFG z`ov>4`lu^tR&ZeRyQKJ0G{1JFq6~gh^QLiUq~JJkilYmrNqa+H&`81b2eZ6LFOdV`%@&7VzJ|2!BSh9uPgaG5k*AW zo$)~V9)+qdqYCz8I;8_{L?x4q+&NacA0lRUcPKF@C47Dur8W%O3?UjF1amUYCH^1> 
zxS7u;V={R^eimDus|YJBa?>brcKHDCa*Opr%QFC|ojQKzZUkCnu;~wiNONu~Ib-)g zVgId^LZ7A)Qf*N`1>F4Z%Q}N6SaXcov}R)-K}8zF!rw1uVxwh<-`L945ys(5MD0H4 ze%LLF>C9RS!W13mPlRzS)l&b$2&5nr&qMmXk$!b*sZZT|dxuwd$6S$1(4-^LC`Mw< zZojB)dMd5L&uPL&Ca->kE_EWym&$|*DNZDe4P%WM+uhuAN{5_A?jz-sN(!j1Rb|V{t16jm z3&;8gdb*Q4|MQr@^NDYr#gI^~S^Mx*7CE+dlAF<6@+%O^R(-|Tb}$2dm9nQpzKC@q z%I_nZ&29;rL2SCd1G0l}f$g#cpRQ-WF?QW)n0cFTF0yTz<~qW~9(%Y#owtUfPcF9* zIwDm3l+QZ@oI9JghBi-*flv^vQQ!`ziBc%-+4rq+(sTx9uw{muv5v{w0B$*gHrS&C+W}+@;QQDk7&M~xxXD&Iv<|v?WY0)r2p;hi?Z&t5 zM0zpE8t?a9C554}W3wI5vCN>}Ms@X?JC?{qJytJwPriNh-l^Hbs@fbuG%9?Xwhj&O z#G=j1zRchVc(g;8Jlepmyy6aYo=wl2GZDJR&L$h>RpK9__K|=b&zmH3R6+nQZ*>qu z*@df6qeXJm=&{U>gQHuHfa2|zu9jHJdaNb?J%ei=?pSGHp6E*_H0QH@xZHCr*}*fT zYVTUVjb9F{P0&^mX$|^1QU@<;s?#qc@xZDrJRRzCO)J~X$a1ygg)#)zaxjO!|Aijx zk-hCj)s8D9{&I3FA89Miqs}46_=?QD>lh!Z!(veM3ljc`{>Rs>zu^IMsKB2upgc#A z0CnHlQlI&HC=V_U0$|90%I043+6U79t$eao4(XQgDiGQXJ#p1I*osq0AYjU zs~06H1W{-qge!$A$6=0d2h$hwx3A9Y!@2|#;UIz(0zkRq17gHdn@OwnBE<~{{!ox7 zlbkVa&VI0qBBatAqf^T7nCK@@2uKW~ zq}ezB7&D<1-k+tu{Ruv1!espQIX8_$V??5FepJY3lRWGF(4pDbWVi~_x8`QP_PGU$gpf98lm=Rxn zI{jxL8DSj6PGP=GexWd;R@#?ktN#3!1pXm<@o9Npx-?_;aAI1kd8!FgoMip5)&`4K z{fGpnRlOWs)U=gA0=ggdf+7)1#vM5oIk$0fY2wfE?_vC>#1b0B@(;O-J3pcc>GenH zmGXNhdZ{yl5+kV?4{UalCzQg!ym((9A$h`N^y_oK8^p#;xKEjv1=q|OX{r>zrzuYB zFe=V-!X|RFj7N~0FZ{|yt-R4G0M`Nwq$~?U-^soxG$SC{3zrgh{oq;M^5KCNyzQT`{-PxeS+w}BsyYFdBD6rY%`y>0MzjFp7`SLiO|LSpn9|ZWz z$>nXsvhw8aPzqUygP3q=i4Y*_Y>EWqZ2?^N&YnL|FN$;HJc(vba75CXjAg zQXQKldmF?L2Zx4ikd!f}PL$BevvthKYgmj+`~d$U!bUkaxZM8rntS}$qxjcY$v@fF ze;tLb<-g8g-vnMSb$FcqY>hhj6Pk>6^W}OcU{@AZ1QgT+6bBUl+XxqZ*~AH(5qEAk zqok?I>a7<<@gwnbv)d-`FFk>viH4;$*SXek4{dL?a7HztukZr~F4UZqqx$vpl)wJ} z6^{P1L;m--LC)UR)ZW;^K|uc>(nwD6nqO!u^3dlTRMFZ+5Gv&~q_CfjYThPGPb#^e znqm}0F6Xh+1ubH&xHPBODq^UDoU_S5pN}9uS*4o?4`yN@mS|3#K zufFa^<2An84Sv05K1f^qmaDE>A<#(uhiT3x86t<*jZ~Hqdb^(_fn?~*wAwnWLhb{x z%o;qw^To|mPDb=)1R&C)m`LixxlG8QT9;yypdZCYrPp;cr{zM{XF^Sa8^O_l;h4`U z=;emOR_=X7QwP=Kx#O{qzt$gs@xbYRJYrA|N-KPkxGhnF(4d_GAiw 
zNn#qidbL(&*oe@_%EJoDqbnoPA%16+{sdkZ;`wj`s8UE zR3p?IoAKH!qlYg0lw$$4QbW>1sW_gQ%y0K&zMq+a~l6F{{z{Qq#pF&-#T`1|gXfh#G8EI7Htk zh-5<v-YNQ2)yo!_(cd=#$G9im5XdLh-cw$>o1=WaYv`Y zK9>%I3|N&tOwnue364m;J2WYeexpv%$}6j$B~QOA)zI%05o#T6_9fHjb#j+zQxbGT zjnDr4v-KMjhTW_4^{62KSEv>DZ<*5OUcq@P6a+Q0tcIGbj%Jr zGZxGbbp@v7Dw*{ANZ-5?vH3-?0G?74U396zff!@n`?8M^dusLrw)Z!_@)8G<0cTum zUh>w}jkJ}GU*4}%y0-9rEb}0=+}6$1UB@_TRVHhcDxPz;9>ELo5a532ioesIxg$}p zln)%bjKtgGzDwaZTCY>s^3r}OZ-9uvB&jpQy%~&{KaT;9EYKM8gu_TsMhhpllVTAJ}tr73d=yQkbDH(fj@u z&+H2f7J6bWQ>u-dc+inFF6whCQ2J-+a|jXkpH;7ixwDGsZ{gQAm$!#}OpiLKu5Cq~^39>?mon z;j+Gk=&GvMekn1RQgNHs&{x`x4TAM<+=vgaOlLVAC%Z!lNyfHJW)()2{}Xrr^BZN) zlDyOm;}ujSOHtRwDA}x&ae=}6WO1E)CtHb=rEC-Tn7RCL$dg#qsiDlRBe$O{2qsePF)ShcB5XGJjqVA4mIv;%+RYvuk zjeKvQH>2jgGhRk{bJcX>s1{}?zqs3xruSMxwka!+VU9uxs?;-9_1TXe2G+_Qez8v; zCo1-#+KNT*&=;{hoj$mB7ae-K2l?ir5e?{F20G6H^EM~-y2L0^52lXl)UqRNG#Flo zQ`%da5*xx#xSyWz;_!YYZQK=I&s}*uQZfC54NGEOLZsY-1%?m8-&h8Yz{I>Ro#uzt&TdSX+nqw^er%jAfe^#;%vBkzW>VpDcplW$BtAH@AUY z44wz`iry|%;V+?d;aM%xi6q`~trSYp?by?!%Rw8E2|JLWJ32^hY(cmshop=}j&M)k z@}Osga$gLirH?z^U=k2z+sf948*Lw5d}A2d{0D%32v zVe)Q5oEJJj&BzraAV=}{hj7cWqBZH|^xIhZLU!b6o~H4p@s!<77fT4!{dT;w9WK|O ze?LC6*)IXO&eH2 z3@4*XtGD>^wOWv-q3J5z+vN{25ek-kgxT+Lo-O^U*jy;v%l(UD%(jP78q;SgAVzugc!xidjM?d;64yLjoW0JK$e+$ znnPo-U(a#u&Kd7@$rm1R8D9JD6SY&Ss5!DZ9z2^^xne+?03)=<)H-V$I~4D8QoVXt zKe17=^#<&yO}yx^7d3~11#LOUO+D>OBjL#Znp&@H%V|ydf_EVPEhGBRl!N?l@A&^@ z?!R92q(2*^$RD6cpaMe9p)J135SGYG{*p}Lh^#`IL%rmJZr}h`kOjzj3SWP{$@lFE@;Cxf?Dz#j*rf^hvK39>ast*3PT#>B zjnY zZZOC!h&N5Gxgbo-nO}jea-_Tw{k+Pl^r&wN&h=BWw-`WLb*jjeOvynhO^o5G9KcXP zJc^>2bhBf?H*5Y&MP1h*FFs)<6AdaQ3BrUZy`+?pA&*puHq$Cu?zJ<~;A=P-X{$=c zG;Y>~fRwR4p9{5`pN2nkHcDFvnfqxMN89#(O`L>yYb=`owz1w?e;0p_IY)?F1B&t1{LL2L- zKh7DG$q{sx`or%ujLU8y0_-7(Vw-5}_BG2>*?Vi1aD4a`I((-_2PkprP~Wmfv3R1# z#Io^ns9?7Zgv{N{UUNI~-9TkEx8LE?4?m}gpb!RqTWW;S6MxPaRZ*6C3riMB@0iTv zv1t+qwBJ)a#J6Kn0{%GNSSdk+GgMQ03Jo0WtsBM|btPVdnrli9kDB2NxVI+=j9A`p z3e@k@>t?AapS?&cbVE)NK<_igeELPRN4b$b>nPiVWGU3VXpwE$QFTLK&5w}St_UF6 
zcF^>lmMj;tq-2=Qe0(GFtcf6x*1c@nQXW^_tk-?8%}typ>4&Crr`rAQd0`~S%-pa8(77CnN>lK1WPhhvz7Om`^W4(7B#HE43+U`!w^899>Wq1>?Bn zjMGYn8k5?-r*V)@8cd0GmvcHFj^U^HTQU@$o{^?{Ki;FI3DOXpoa?7nD zSoU1xJ;3g*yK;F021%@Nj+_-i+kA@65j39GMbvWBhb+Oh#!PWA<)_SbLN?y)L+zU< zgG8uSvB5N^Wzf_zGQqR5fm9Lp9|@ihJ~?57UVF{eYo!m#u21&@!!#mB$JRIKe31Ka4CdXMU(ahpX)0beEq<(zk zz4DCh%+_?K-Va=npjcJ;2JG+bHOy}4kK3k(^HVO8G@0SK0Ej`T9$ePOShLL?{V39M48OOcR-Q3pNbVl}nr;lz zx-pw?=B_z+G#R=MgvkwYHlZMh5_gnoY8rS|&VU1QagySLTYLAl$J3{9yU+mG$=H}{ zkW*rzQOzz(3A64HR`x1dC`p|rMZd+=_64kJBiwJvU2-^FqKg|868SWTpNM!Iu;IFR zdq^e3yc2Pm_hJ$}*Hgde+~c>ssC*?Ix{Elr72w$MJHJL0iC_`5iEU8M4kEU$tr2c# zUd=r$gl7^xj)k8l-MnM)ur*;pLh4Wdq#$pYagI`dB^!GGwnpVYBhcT$xj!P%SDqng z`!xvV>iAdeQI76_1>i>xnri%D5g?g4w(8;bRmj(f!++7QVX+)1ZDVp}4FbECGYA>A zwz)l6zsg>Ey%F+-;0)A1jjYRWp-5mSY0_G5Wcod>*0}~6IxT&m3`rwRsPdW!M*;)h zk-%JQlC$-bmSQNdhyCRw`d9lk&DfOg4)r<$C&XHi(&~ZMlM%Yx)%&BmQ2Ea>Y7-u! z2DY>@xOTa8i1$wsMIR-+Jb%hJrSd?`)9=5q%#s7AZ?P}_#3+k4`GK3i!tCLHt^M); z$3Xmb3^rft$PE7oz)F^X6#qPe5m0A(wZm=O6$WkKMNb0!a*#>sA~3Puc&EsyDH}$3 z@IG;awxpTSDFbmTs$5e9|0pzy-eC0%X;CO}|++z`*cLyf8(RRz=?rwaGOCKW0RQ{fM?m}DT9Xsy~A4eMOkRb_z~W?Vy=8Gdbjz(%(hP8_Txx_-h9gZ`T+MZZun z+HXy$uvEo37ecqiH~2hFt|wysYu$7ZRhDoxS1M!N;D$=aTi3#C!lE>Wkc`-U-A|>C z6UBq96jQz624b(R-z?Y@?qNhB97y%UpCoI})1a43cOZ;fs#jFKVaQc!@h?_zH^cov z+&0ooQ$Ou`m_LgUqRCv#iwo%bEqk2_buEmrr!a@b;~@l8tooT*qmN+Z=*;$M=x!_O z%&16+HRw&0`O4K5WO@+z@qxzUs(0~g=dzV{iQ3RkQN>@AwLM~Bh9Ob}lbd$(00Xpu$CiPPBuv7H#T1o@dP z$lj?I7_gjCiSV5@!E=_(_y{fJEnh=0M(u4f9LChI82%EpAzf35Zt;bv>~i+q1)G*9c%8{zhe7GTg%nswl?akSHSRv zwEXXFt-m)Q|8c|rbZz>7vdU00w)#70U0W&g^miL&O50yFadNUXoTnt6cKJMK_pT?+X2}r z;!qt?Ns&9FIko#ven;2Hks=aQ$*!dJ7L`i4(U*b3Q1#jUm}<{irxtBF~*f7RZ8o ztqb;K;l*a)gfJ_ZB!@dNLmkKJ!HghIh=j+|Hsmo6(O13qSJcF7zM zE34;_n{_0WyBnFeX;nr0TubCz`8R9@ATg`awJ%C+rfM}|a55c-%xB9#%E@r$p8rHN zFcnnsV!uj=UtcEv|7+fVJY7z)TQ-RD@Pl0d=e^D-LNwESALjCYBF!Ch4Xl3f zhqQMH6SQ5j04q`HN@rHuwr$(CZQHhO+qP}nwvEaA_wAm=JaZS_i*NNUPrQK>5hrjz zJ^F5O*iuC;8=0{M>va*y%acd_8cC)NyNL3`hWCiIppcC@h%CbDzl*%|fs8A%e&5vy 
zsW5&FIuyrd-LUX%zE$P&mKgphalw9oql=2DUl3h^tn_(er@P=x?I}FV8?yA|lW#Ej z8q^6DN#Z^on{w;k*j%>e#d&DpHpz`96|mlqIipq_=C zW!iW4+A}hK1|8ehkQ_X^z;+`=?6aKp-C$B5#2oGncNZTUCM?`~jk+UTa;Cnm+4Dv! zn`d91D0>tDy*YtJatlPmWPMsw{EoBCC&Gf{3?{C6CRMw#v~%ox1^w46*VhK6X!$XN z+y8?Z{9BR6QBL!p2-d8r(Nhf2@G+X$G$%x?Uiiq;p}EW`35m_{ctqfWsUM z>lKI3vw}*2L3WhpZ2|kf=Y>Q)WwJ~h;r2H%(HRO?w4<3y4A&9Z^Mpw8Bv7B4#=>rZ z%&C{;L`WnS*j{5&8*@OU60JeI@XgvhJyC-CdM7Nv)lYKqXIH<=5B@AbtpedICG*+U zsq77*)N)|nH|=3TVx2|WO)0N|{DO!aHqw!@-Z7GH18Wrd^)Z9sYg3V>Nz2>ws{Mt9p^Qw z9H;RPYubPseg7wcLegTE7)Q4lZv0~1XYwY%_rwEWl^7PpYZblZ)z{FqsVgMp?)J2x z@;G$2YZojLjIhSlU=?_Al_9Yy_+yI4O>+yE?SM;AmZuC3~Lw9}D>WKUe_$fBE^qAH}I3O(IU_O2zfM z3w3gF50U()6G9Ta7(X5eGLHx$bh`m18{m_%nvAWzh{t_x)6XAD@~GD5dLbI9+l_L61;e)bT!C5dNS zo_ho7x3zKxRQz(XnYdGEA#nY+QolWDT=hsadma%j&et~5Ml+gy8QLFrPzNVTx(IeF zb}R+2hhCT#jz}aIqCXetF9469f>%&JmVCU+ujy1R*g3f((Kru#nRY@Z50h@IA3H24 zwa;E7%sj(4-W%o|0N5VR(i1vbIXFln3L1VPZLcX`$BpP1%cxF)D~!XG^lc1?>$nKs z{>Qa!0_#-!)so*}+_LE%DcqM%=A9S-6i{Ir* znavrD73xboFe4v7&KwkMi|YJ3@CVb*{{yBB|Ance?0>O<&d%pV>9Yluo^{Zp0h1t4_=Vi!Ar1dc1R~)lQCv)P(jRBJPi1m@Uwicr5qscR zMAeBDd);XY`%AoQCKhbhk;vJ8o~-StE zG*rPZCx`_aEP29M64ke|6(e!-5;&D9WW%8-CcL29L0vl z#7~t`F9`r0PP=3Y2Rjevp<)96jS9?B_7=+XXB?}kM)HO&o&tJ7*)lEh7%6xNn^z;H z0BMXs7Cen&6F_<-g;tA&s?S4l|CYA?<*&+-M_|7Fl;zfVWVXxdi%Yn2q`!K_)%K7s=#INo5x(+Qsm8$vzyRlfHpXFX5X=#E6qPnV&EjCxD6w9!dCsEu` z-ag9+p@57z4p-Z4)1gRRoI8ybHy`%gvG%+CC=kz^e{`K+22~v_cn{{? zmqV4GUaibd74~b4cL__0)1yOml2?fWXp0JWF6Hb6glR#Up74MFLL^`qpv5U9=(icQ zd%4VM8tn}0GDVQ8J3wpe%ymy%SP$-ug9)0~q$gr4ZR$<9YWc~8LG-lnFhxUhhaoZC ziVTksHfQu)B5|wGLH^=7rt0o`%M+4&W6G0B>8J4~HXWo+8tuFIqb&zb$tShy*XE;0 z?ox)nKXFq@w^Dg%h2u`r=mBR9V|ME;zEF-g5!x%*$nt|2D^BP<)wFQ*7wW^OgjQH( zhW9)rGZ@6ir^+Dd%R13^jMF4?LGv%!LmY$D`rJFLx2q4?#JV6VGS^JjU0($fZZ6uO zhLchGvZ2QhyKhA7cR~l^K*A6<&kOg%!yM{h{*M4*(6Pl|u z|8<`lD$r}jEs-14`UqW=H5<-fl&bG_*p3JkPEB58Gh+4b zCW*F8^{&!5;z1=3yoZ1Pwruq!@0LNHyE8)=@7BwTfHW?88RT+(!60{zE0?Y%O1$j? 
z5&Pqjp;0UhgdX4lTj|?w8Ozfx<+UC_YPclO7`oX%hfFK5M+Jg}t{>(PJ}9Wx8hjrX zY&bYv2A@-M(JzaYQ?+m#e}{=hw!LqB7#tHOA+njzgoN+{$7CkZOUAf@TM8(GsJ_lT!YF@xHGuJCijVTc! zCRoENb+Qm50OVewF|ZNGG1(+_POqOsD7k>{G686AoQsbsc`ec5>+A0)!&o5nh_HWD z$XQ}4{ZjFWbXYpXxX_0!FDRz#|6HumgOQ}7+3qsdHCUd65aV*6KXpo2ml$+1S?SO4 zNj6_!Bu;W#Xe0L-;Lv(;iTbT(YAj<=yG4JI$Mm+1egr<^Y2}2Fc+57eKBfR*87Xh4 zQC7ZFJQZ8b9;_8Ayue*#7VnmPrFH`v-&;=40j@MX^P&itJ$p*u`>G`-ov6@jBcUH; zH75QjV-^|nAqi4kU>7!IP4QKI541lujm~x_Hry-c+BIMoic~0B=LP;R#1S}q`}2Py zL+$?|GO+wRGRXZvtq7iDW0BX(1=0YV3z*=P$`UR)siHN}Dhx^C{CHP1hf)}b2 zS>lL&-Qr{HM<;sKx!Dq8&tZx3`I#v!;j5a4!FMA@HIt{a;Li?kYr@AB+X5WuT?oNT z?F!BNg_iix>X|4cjCF8~`v(%5j=)!&&c}~$Wu?oax)8S;Fj@M}Ar8&HIZ~GDz{T|! zZJt$}#Rj=_zwmU4!U9lf1zw!4~jTKzS=LUMCjw$D#-< zP6`QEC@&c};q0bsNByP!oh7RMwt$=HXQZX9N3f@NmWK$Rj91+Hyu!X$&xAFqsI2@y;>6IRepB3KQ;$Byh|$Q!S1WAef1{2EXf_>j?MmGOEd?DB zrNamvuoea0{zb~g;X7S3q}{G#T=`|>%u28ND*jVemzT7j#an~?%ttfaesL(=2Z zY$yy~1@fLOLE%jO0UNo&)|(Bu?RKL>Y~tLo4-JPr$5qD8)rA!JR!dtvt_KD14J>S) zf~l(^7M-eeMF?@BBGp)FN#fvr(ehS#P)*|3{#8Qqf)Ik=`q)Bihl~>HV+Q|uEUcCq z4VB>h2nasQML;B=Mz5NF!^k47@gB#Dd+(h6Go~FB#pLUWz<5MIHoOw7?a})O^vX3& zw-!gkpX2-Jm?J(=bIpj_Ey~=fKd|2a5OMB%LxC_q4?FYc`~N56{$mT_-vmseyr%V> z%+Eq54SO3Nx|gMWx^_v=hRz8F#JwH?5rM#8RW!maj5Abnr^-`P!Y>L0!TrEwt(W|W zYtj}f*$5_9&f`zFtm8}9_s`+vY7sr4Xp|(-Tys!E)`m?llVq3xsI0&$@(&M1ApI2V zo92!Sn`UjVHFPlYnHYnSL9N6|_ox|~ej6f|I>Ro$noH9b#~_E}OmvLUo~DvKU3n~U zRZ0nJ(ln^jqGvH00!YDmpQwl^#3{5Hnsgz+d#Kf>%P`_y+n-~#Vz8wa?#Rv(SLvZ>|WkH>0Ynt&wEh zgc1@*l?)aqB_LxkB%SG*8!N5N=lt_XU~-y*zn&u>c`+jY7GPoeiI2P`%-TvKE;s>p zjFAbomiWv;H8W)cmkJ(M@GU>+B#vEkgG&ChXTYVI$s*Lk*q2!=8|q11?2gY75o#|0 z&Ou!ESB3xYpAHT}9cMfoU$+OV7~xU9SdFr;f8t5=5l8~fdUi2g1=joh6VD??W10Zoe#t)3`Ac5)sg~( zko%3^ecpLltAK}jg`&>JM;Z4SuEtxRpU=b60W?2%OUD(r@zh zrSfuo!8{f7tqvsm*QPnwE5quFaRpFJs>&?%ml4s}z%{i5+d?_?!!KLJ1F|lcla%4I zC=lGvgHmr;+AhVVVfTmwsWI}2pe>FKWyS_ZUYr}Qk&r?dTCw255p zLuAx-?U=7UaP4b5Tn^B8nQKC^2MPtN+9%R&F$@bbkH-;uEjETYv zg?ctGEg%^54O%TaTMrlvS8Q&Xr`&xjGE3&yIyDwakaqLu+*2CB(#2g}gZw+2zzC5N 
zjTipAiFeS3*;2S78xtq}xHYO>MFH>{E<}@N?ns1HjX0utsc_1N9&f$rI$zU^hB*A&w2=ilkcKlwP!>#8Ywp;(y`b63| ziY6tCweRK_hjwaOMCOKxCmOi0V2(D7*6TiaVjtw}k3V8r%$Hy9b;?_q`k-{m1crXl z4dm)1Dl`x^k*mp|moCHEGV>&$mPn&7OnXINPW@M2i7ntx%Z&iM z@Gwy-0M?o>ffgB;r!WjR0>R9(@bLMgfxMo7_L9D7OA2B3Iz*z?ZH9t9Jm9s@bW*ZO z1>wt{+wh5fQtA*cxA#ly{ag9xv`oEV7=RHtQY^t-%26?xJ<6Y3=xcC-&UigdK9`sy zRlMg!dhK`Owkwr@lXUW@w>}ipn%;5}|K_pgE2sWi2$ z%pAsW*qrBrFeTjoM9M^#h{1o6BUS$~1^$mN>%WPwe`Bk!$E>^^smzh>B)$> zcwr4Vz?5V^vyDY;xKn*o7Jl5~H3}Xl`m){%w^JyvVeK><(VWK{_Sg5_!Q3Od=-H^q z?xk@=3;d;2@~B#1mQ!+t0&?bVI#7pL*f%xPi-s<1r=cS*BeWGQRM=@UPyuwix&{Un zYI5kj3NZUn*b|8hjUbXHhLJC0G92BqM`9?baQf2lR@n>8b)@N*K_9XkNKmbjlTN>| zTZy-GOw;deYs{3#lBEo z0v0mwVvg&9MOoS|9IHV_f@I>uv{7`1b;1ndT`&tx4+Ad(7`7|4OMNnnJyvR`v^l1P z@vm}wXHGE~0tl{W_7aCd)u*7BV#qYC@O?tdKdXFc@5f?rutu&bZ;138k*hdfl#(|!%xnRaNNlC4Rgq<6ZY*JtzgyuHRM$& z$fMid@7SpuY#Cy54m5(|=5S7hdulByv`@vf<1@yE6`2OMK^um!!Cfp#M*bhwcR{+e zEyP7E7dIsEa7?FIs57RgNRz%KM!H>yofsgcPNeHaYffa-Bg#uyw073Cg5h-DTa{BJ zhCeAUWzI4ia`7JLrSLM(EOjWUNohyaPQ{Ewk?rNRagU{YS zg+o5zB~i!^s`dYSRR2v{dAUwzw?Vs)sqqvO`UcFQ5!c=5oJYqaWoTF9!vNo zPb@B`h&X>*OAfO@4>+>aaG9uqIBS829uutH_uR*y?~i|MEp#(;Z4k&xH+4uB6-hRe zz#d#n5^l&J5mESWgSKsxVH(M=0_Ls?6*#S37WSLfUPuQ}` zs{@lX&sLHZoO@)-gqkfHP~rbn6Qk@cMmG72N*KoY?CSuIGU%n~jxZI%8}H`ee;{8! 
zlrk+pv~_GBburwsq|(5fCXbjVvWaG3t!<)tQs+1a(Urbizv}O+dydO?Al^05X`0|5 zE%)`-!F2~9fe=;1n9%Sa^+A4CvBV0wKP9(WwxMLmBQU&ifC>0uwg~|Ks{EN~X8L_U zc#93Tk&4i)=axy zTYJl^=V;c()=aVRD?WhKz5ITuOX4Ic@N@IM>ghN>JsV_Pu@PRF{R+?O|RAkk@ z;kDN2EX8MxaM-MtJ)4rd4Eu|5%CeaHG4qyvk&0eVguAnpShj=;{Ko4BZ(Or$F1_9M zVFnJdFPRg^EfGvHhs|ae;@FyyXZ^%#_{=efcTi+6z01NO48X9Cp*;kvirqR#3m2z( zdoG>%`m6_^A@6^AT%_svlz+X=>i<2e|E}pBV>Hcw6rh5(5iFH2n}pI;;!Wm(sq=7E zsmZ|$1=UDI)Xi?ZG>VM|Ci<=_=CYASKo9b`ak$A2CIm*!OrPxVS6=BOm93n;&X3f@EJ6fDI{_J%BAnf zn4WF+ITap0Rc?Rp1 zWY-Yfwc~C#S7rch3H;D^?oQo}Ht9AMW!klgErgF2J5*)WZE-?9JV5BENhz8#DX@J5 z2K75^50Vx4qOSXU^SgdBv)>JQmOoP_M4nS3F_x>T@}2SmR)mdsRfEqIrO=k7o5Z~9 zpm=}3q8!s4V3RWhx-``Sa$9A+#+m!?*OHL3{KnQqIbTe{Pw zSuQ+}aQ{MnTkyyXM3}i)ny6d8uJ~+Af_>`vz9tP^3s@HS!+kv5UAUURLP^O#;V4K?R4!qh z9`$%;3K)9I0WrODtigozBX^&8{t_KKG6XaS)vH+DSkGn$J1zvd<1US`d6Uc}9+3I6 zK9U!qQ{eLE&-qqfL=N`hPCmfttbLJ=oT*^vPmyCn zGlKxF|9(ZALmYZ-B7^ORoSq?9dHcv8>=u+!W%ET@hc_`MqH%N?4l>EXy|5lG9`G{0 zk~Yq{6ma(ra%5Q>uF0=5dKmK4v}XoctI>q2E39fQbh6~<*Q9K?YUw{$n^!DhMF!B= zvIJ2=zdD;EH*f!#ElED&$tgEKLuaa}R##m1OOAYWOt-)xB(<|K!v4i+xxaksf}Z6k z;BEY9vV|Jv2%^`^Wb(nw3ug8EqR`_>$U>X4p)XIcVTHgy3DAp(9!t$=&Q8s2)}o9A zM-&v2>`2q}>~ae}geOK5;D~bL=z8rW8u+!D-eiEyfA%6$GL7k6x|thH5k=EWp~=C> zn!y)F;IKCXpEg`+o(xOX-|%Zh$%ksP7NSB>g-|%jIMk1oYE)w_sXT7B^ASW)zwTG({gYt^v)G5b<59^(at*wPNhC6)I4or1-6ucB<0C7#*Wx%9x27<${6 zogCu@^r!4HJtag_Lg$endTehh3S#)HVcn3fM-e<&Y%m6V_vdT=Y!4E44&1c#w6kB( zu^K#o(;T=o6LPsv?>Qbkk62*Kqkp&2KiOR-A?%njd?wEs20DOiLc`}tR7-Xp{zzYd zV~61&M_n|O>eDb*upK_u*Ym1A=(qWWw!yPwvXm}wBVhs`8-;uEDW@+8aR1H{$zkuH zjPbGj#bnZ!RHZMoyzfqf_(wovBj5hl0=ZQ|y3647j-|>CfYX()Q^0)>bhy*%juik) zFbt+#qi_(_M~2n@bo0JF%!f+P?phwZ1z~IaCT=)L5yAkI^SiUdntX=?a~^_DRXA`? zg|Cb{j=zv_=Q*l0_TcK_vwmoO7cT4?+WCO>Izt8zeo5QM_leBVjEYk2`$h+qXD-kR zKTJ&*bjkw`=K$&!tZ8^K7D)MwUt1NAGfsfa4T+}F9R34}6Db-8s@o^<_$8bh{DUyy zI^Y*o@>d=2iLqREUa@M)bdLZa(+LT6N+$gLIbY|dZwNQC0H6MB{R295@HFdDJmVlg zb&qvmOd?x>5eSSjzqc?o@|^YQb5HVo0c)T8NgL~fugD`r3EKjC5;ll5+cZkk0 z87+S%V0cw=rp=c&O7>6*-U0A+}aKPoIv0Kc#HMo28_cPbU0? 
zjr$wa4Cj&`7@wwV4YRDIEgynE)HRpCwd1>TKA~qKG&b;Mj7UYM$P-8jaCu%$ z;GI+sj9)I(C_^y5npE^Mll8&-e4(!|FOZmlGHC>JNg)Cl7qF-vLh$4f?OD*E_bW~H z4Gr?4PcYogAh>B(Wh|`Tpp3+!{^FxZm*Vqtrk-;ZM26hzA+`XT@FJ1+z`D8h;r@~k zxO?CgkATycq*Y6>{S(*bjc9qoqII$3-mR;l6JAe3~70xl6O9L z7H+5~cNiWgwoU1dILa8h#RikUSU)?(2uUVFc78CFBF!F-?0&S_%229<6i*@elCxt5 zD4S{?g{*$1BB~ z0dpwM=-;q&SgOZC07u0+4K<`4vvNu^*%e8XePPN9`Z zhKgbdI5vj?EL9$S#BAm8PIs=!`(vMY20~E))6$f$1|Vn6^Geo+NF8vDq-zGC1wdUo zGWjjX;G9+q;IHF~V38Fz|mn#@{47r{<19b>^D z5Nu`B)NSNdqj?7P^rA7O4MW9ck3z)<42xP)7>6K-k*c=PE^flG`SjE;CxrnKuOe;E zItkhVXj90gZ-#%>Y;z+k*Q9Btme!@YN56!sj~7(dGk+HswWw*}i(oa)l2`S^5H*Uh zqi+xK5I{Wt-tT~`rJ}@O$>snQ42Nn`*ZWKFUf(iUS}lJru+Hi}Fb#l0wgeCD+z9Nh z7IUw*=cLfN$8-zH+XZ!$-bbr`CfYsTDDn3`!6d5ic7Kg=-F;zdpMLY#Gq-9#G%~r0 zp67&86f2f5TQSG*n?{$^d#h*B3i;2dQCsuLks#H8mEKMR3{b2OzrhaSdAN8`8J#kC8=+T7qbup|BYs$wE?vy=3 zObIDK3TC!y>8CVMZO4m&(X02sD|F7Rtz3#D=DqmEJDb0qzz&pU-aMEonX}R-X~2 znE^Q9X6&RjaPViV4>xk|A*H$3Jp_@sd@&nyV+(noZ;iiKr+ z5svGYM3Lm!0#F?zF|Ib`blxIv$LEiq0bxBaN z)VrBZ2@PpG4AMJIqx_1nc_OAaMjQGgY5hI!Mq-OF2FGG;n~n^oL!kpO@J+^W6kF`S zjD)dNjmB#2Fr0<4_P@;B+qqm)Ww=OeCq4vNLz5+ih+-r6W^{1&q8l8q970p|#!7p9 zxrty}i6J@cf=G>t$fnn%T>5zIl#L1y46P;J2qZOIGydR?La-!ZW#)}mS-yo>f`#%P z(FLi_+mBGh<5CgmP0=Psc5UXNG9a`@3TUa;HEkti9*K-FmN(jvl(-#HYR^7bNDY0V zY2q;M(R*YS!?GNkyh9Q1pxV-zi>e<0+T-=QS7!(xOC4#d0MQxStr7;2SnVyg&NpG0 zzG^t(e+$9Q(3y2f>(STTc+oH$g{w>84xoHX;M#AFr=XRU*lR8koHt>UzM6CZpR|m% zQBI!Rb!sQgd|0s~=4dL3A#Oia7D90_gLVxgbsmwYyp3{6zRk3nMNAuenQ9maf9moW z&KrID5l{`i4!gcxai%28x8-os+QT89g;GD=K2u=61C-YqaXa!Qy(i|qrwg!hHnT%e z(e4flP53*0O+O}1N3Yf#@i;Or;o7QXqTZiBQ-f_Jd3_4O263y;#Z`vgVH-OeKIhjq zm!LdSelfvX`Gy`(v@LKu9H33~mB|2=hv(s)Gklfknoj}d3D44m+Z@2MJh>t7$%+qn zq$eBQ(VyUa2DDZA0(RXL-0}NP$~Bq3Za(gL1>e)-%s=+B_erIg@cnYO6T_I#GE=uD z)crCSz+}`ZGXb6mIHa z6XuymYuDP${ME74$Abp1jbck> za=zFmle*y}?nRUe9?K~Axjaee0h2T}kKGM-E{{PI$XPqI6Q{2MF>2(-J4`9SD5=DJ z7yq1h@-+>>^^wHsrOs&52Ud0)v7p6pMlR0!(yK4&cJg;Xj{j`i6_!$Dw9J~$g+A{v 
zL*ONF_mcHr0TQEfNzJ5Oxx3*DJ5r1@FsnhWhIgYv+>}E|A16Jpxw7^(^6$hlPbcjKeC~aqs;L_%aW8<;@#(|vOC|qwWE0C5h(WfHkF`Ae$I|)_ zYG;Ocxl9_w<2M%;dn2k+qIq8p8;-G&MMkOg_(d8g(}yw-E7Sr$YiOruhu?HIE|Q8p zSotNCms1fry0cc^%E{|-vPwc2%b$c-y}(DF#-yQ!8!&H&7X8x9<4 zVDck#m*N1vPmcgp7Mx}Zc{;t#i@jAmdL9yb<9f8eqV9+d;6v-?Qad)}3?WwQ74~(h z4^Oun?8-um(8`Gj(ZgTN@@=}_B^cEtYx@h9AQ>a2MW>bLdug;)_s3@B=yNGjSi;A{ zs%#MP>vPpmc+<$6Tdp|L`Jv%^4_q5(3&jCjf@lA7tFdY3u2ZY zdOj~27Wima6(AAgRLB!0qt*O8E0)B5LGF7cC}WbCC`Dz^Ei1w%_1ei(DuVj55Sfg? z>*fl3jyu8oW78ZB$?BK!FNlq_u3tg%alfny*V``|f@Oqrm^GFz-RK=x@%Et}3j)3^ z$(30)pQH=ugNn}P(gc@-7`&9hrROBy*Ayt7$rp642fsyn`MYAwb! z`97tF3tKWQ5_z@o8RUbV(-+4ReGl+dLp>f~psQ!C%2{6$(K)H9WJ5X^l_%4yqz z!YE}g93}M)ct&QKXg-rs`#s+h3RvqV(%0RLlHpxYaNh^>nD$>aQd?|+wE-1Lc=y5t)z@g zpGw_P4|`&?Y-@!#rHX+3*)0nnwae-)Nu$-t=F1%4pUKak|X0~*$m zG7vT|7BU4a?4N;ey%N7Vg(Dxyn?lbQt>)qML(D%3hkmytY+mifGdt8CJ@Gl`|zSS&wd z)cPX6@Fc0(oPycX8Pk@rzQblCT*)FQv@x-vdS+ps3tBlK#Cl{Vde)Nv)a>lnv@XWL z^qtGwoVtf+JJo;_VIOW@cyAiut~tm)IRALk6$(r4peo{Of-GTc7A*P_R7Nb5UMQCF zdLIk6M93}&iN{jVzF{JiUGrS;y{2+#bN4P2J->tNx4B4 z$|W=>)YfGRO4aLjnUdq44a}0IJ1C-hM8TKHPq0?|Lwn8>O*_UQWWUBW(Z#EQV77e# zGen_Z(M4*JV9FTg0LssXde;D&&zX^mhTu@#pJGyaY~e-(DYv_Ikg02Oz*ggT0GYJ> zC#VQT9&!OtA5Ba&b34Fj(8I!qOG%!q{f@9o9F1O%?TVC#14K?=8V1H8kV2mc*fIfq z0|@#;C$0R1&7gWHLjg?=?dfeJjz0?FD+0)3?D&>Q>&Ss%lYg(&|4IBf2(Y#leTmtp zIB*CrehD?amz}S|kfPC5z)YiQ6ub7Cxw&;TL+Y@|?GJ1m_u??R5x}ByNW*AAH zsI^6st5|6rPYaB|*r}?+{#d%VnCv%|Y0Ht|&QSDE@rD?>v3EZi-JrafN;lBXi z$9k9#89%Fk5&BR_v{t91hj5=$mlG^e)h*~YRar?vQ#C~_KJCzs_7aXR7mh9iv7XgN zGsV>XB{1;j8GGjtUp!qHI1xe3w0^b^E@FM>Sr3Gu*Z{}n;xB-KjtW&QTWUdnt!whnOvAwRjA6Y zF9n>oRS;FA;#_>;WIlZ6m|@FC-g)NR`(E0!grgkF7QZnkB<+ApRCt*8MFMK*Xv~H_ z%SUp5ShL=p#@p+TbpF*+0qlK$0{c;S!KmQ>2+YygFy`9~RsNFB$OE5qMz-f*0*|bH zL^LA{1`%gNQC8f~kkMb->pKtT!QtFu%gxwG=3{DVRk9(DP7kggb_nX1V2HM3;tppC z*bl`b?T`G@jbFM5;EBPs*i-M~6J=J3qRm*G9knTbnd+nLGvUMfFMFG6f^Lkm%NX@&`RR-84)GPu{!nJqPiIKfsj4oB({VK*C@XMIkI zLsbASTb303ew+guZ 
z<^0b%f|KeWtShAF=$I0bp}HHRzp6Ccw#EvMIcDs+a(qO2RncrSZnu0VKKzjeJ~+tY}hoWs>x6I3@B#(pm(`ZJ{Xx#+?wkzbZ; z5^=oYRGmSILKrqS#d7X^4`)lWYxqRwm1ya9Odn(b7j|{bOLkpG)lNmD0u}ZVg{4EA zfkEj{2odtST1aJC0SGpEZ~vQQqHS$&RulMq+7VQUQdSe{{87Tij;Z}aLxGR-*}$GI z$r`@SzAVawK27kjE<-dfk_$-I-ZEHuw{K0hze~0-ix)udgDdcR^3Jn%QT|CHqEoc( z3T46L9*Hf3N*WL29*QkR&Eg!3Eyh;L=6>L|?sITduqqwSnuXIkI*+Zf{^I_DVG-XB zDIJbprPyQmrLC>qp$|)7DjOqJSA5@yL}p?K^n$g|&%jaMcLe7vB*PIU>@(BCJEh5_ zJ873msJKNU3%ZeiSej9&{XlC4v4?Cg?k#T3(0z25c*gb{u;hjU8CAei!xrV#cAyG1 z)s}~bhQ=1P*Nsl;%dD0yQhd~IxqA8AVB?>g)ekMQs_VbZ3<`ENR7lLYYS&23YqxAv zS0x*jmt7%3p_|a$`#JQ$`xM2|xy;!*KwN=f-1(Cau3a3K& z%+97(SIEyTuBn(_lVf2mA;W_`MH4gcNCdBrVT!3$>okL&dtR1`Fd?9rF6)@56Te!5 zVbEZs{W^z-pTo5o`{J@dtlbCemE)QrK)yW`?blLXI;+_KjUNpzBUUBlW&?8A>+a04Aq`Skt0;WWUR&2ie3vHhbTqT$X#=Mn-oaK?mF``v*xs_xvMLaG*s(!2?56dYJ1m zV7cuR_ICi-u&*BQDNDU^N8z*aRXbg>U*u+dUeZusq&3KZ=}%V$h4c@7D0*@j_UlB} zha_0?%vASqvSAVkkmHmo@^tlRSu`N7pabqG0<$$m7xR*i!WKgS%}q$N#s}9Bt6-eK z9)Bdw9rdbSArkrTHlw~PF=O)8?%xkjf&f^POi{N-ZoA2-h!qrv3axph4rfgzIoLq+ z?P=IB1EvplRc%#M>oD1z5hoRJn0!_uZvxUy>P!mlYEcES1j7et8&JAze`hh~aSryF zZ_1gZR`{U+!&kW4MOAxnX!Efg>p@4UDhpY7A_S?on`=Q z5yZUdD&dP|{8UDhb9le=-cu3c(Bx-5U|aU9u*qGH83>j61a(~nPt zdfr{p*(b~r@4Fm4j(WNvK8(JPP1I&_y~27OK933u9ilJG^_F~moR{QGP+Y}FC#M44 zjV{R!DIQSL?(!`T45SbHZ+fT{qJErO(~+qG)D2)VsBMHgxG+l!-^qj)=ytWB zquax%T;x~cN#V|BBO;b<2#CQZO`i&+Z)XLQ6nMD<7*#*4|39R?Q?ThmR##O}VJtBI%&uepQeA2wVOD)s=iD2{k zb}97ww@R(wGZJ=>JO}30;}vv=?nQOM$_Xq|k1Kr{@qj$hXXBCy_Gxsv1H2+mib8TggUlqt&bqjc{R_PsslGW%`axcaD-?A zIG&_ExqHpn!_nr0^$3-(NvAbd4mlHWzE2?d<4>~D&*j0-w8G`DWun!`nXBE1p;uv!-=s>UE?{8FjlQyPNu2Dy!$M9C#qM-~D z8(bD$dp^$L`4dpY$j`n6|R)~ z<^$HTnF;4dK`)S02i2lL?a$`PN#Y8ErmoQdt5Tq8f@l2N9 ziU63~ocQdVb4eb`M?cNY^VMs^E5;ZItBf-$1(h@tn`s!cR=#%!EJ_m66`HT|`ix$i zW0aDGlyYJ}A@0L2adsrcEl3Utzi^Z-9Uf)4oUw+|#WsCmeEg*NTM#A;?moW`} zNmvS*KC!*UVGg5ZVx??wVrhj~KPV6@Y|vCHWpAUm%PCu8;Yor(Vhx05d(%Ibx{G8n z0fljqU!Id^wy9R%pvy^WQcfx}`aIhyXkA6QN48{XydrZ-S4>jhZJ2a!q3$+B6~yk- zf>TQ01c~*7pc{_M+R%b)9Q!>UjJT@1Y+4h1^h*P*hNK4ux1bfFCOxa{3$qo19bwCQ 
zA*Slj9Y7?|8PSB^HV3tecX1l<>0E1GnGSaBY;c@`SwO(+s2!7BRB@^OAMsM=$;p=> zPIMjgRP)L!K8Y@kBf&~aJUr#-WD1|swCX(YwolD6mXoh~jyW^_^8BYBY_5W|bc($* zHc!*_V!yZ~bfck+77o4YGqVk$_?S)TrnOP3WOOB?A)ZA_aB=U0Ixdz7Y?V2~&MGRK zY7P)eiAA%kJ}t$5hbgA2@wA7`fdU5?c0(`fbbso^cEpgiqgq$aZ1PXU=3{2Hf3PPxt^oeM>a zY_Rm-1S(>H(m_1o^@A;MJjIE2PGWWUK^ZDqG@{RTi?DHFd5|KlP!lx81a5GVK@5^f zl%^zc--b*XiK1tVuv+6{3)uN9=2M4~sEc-*T5|Yr3rgspdaVd$S`vdLV8UZ<8=#0$ zcGA0;;4oq7Sc}1H<>ffVdK+$$Ma7%rhT9pQ@6c*os8%?$gvEGOH~z#M;t3tZYsJbqxAn!@M6{Y9yt*&V zL3HMu+jO^8>+^B{+Ey*YIa7(&b&$g?9ez8ZCIPGN*3adg6dYQ#VUQ*@Bt;&>J6mEj z{gIMW9I83~gcncn|(2e5l^k;d|>)#+U50p;g; z#uCL|Rb&_Gz3)I!srGN?b zsGn!&$9m(;ZYyzd7|6xNhxvrFJ2^v|l!UhVp5eys0OxV`6Op}c#aur#ofmupm(3!0 zI#!7r-r4ij0(LyjP6ewjq`%`x!>g8Atqi5<3yngI_cR*kcfdWeUFyalOA=t`nM0qd zC<}dGFhNLt_s2rmJKIiVwK!iH+WO*+d%}B7Rc(xh9jo5{&O5JSxYKk)BsiYN3rOEB zcb;Wiz@0y+T#3%^ceE+;Kftz~a4}a$T~lmnsB$)?&H)LcQ$-47lDQ7?+l*k49XyvT zT70x&j_QGEA*qVMbW7F43P0Q=_anEBMQ#ZlTJ`t4H}(q$()6*m4F&f9){_L?Xq{Ts zLynlP!Jwol+Y^?{F4-f(8f52Hki;hz;bNTi>^GAeG=Sh5}3r@{njqhBYO>ti6Yjr+#AtC+t-@e zBo_t%kS;r6^=vm)?bjN5kT)>iC{gr?G#5SqcoeecopWyR0m^fsd!rG2fn^JR)+56` zpi9uV+&K@b;(D!CDzrUBYOM=2K+RZWCO<-BJu(TqBD|O%o_Uk8M{518!|T9D>+BbE zbu77sdl6@5bE!I?y#zI3Ii)?IFTYgwNR| z%U60{x8RBNBYRoZqGDWI)W(P=Bc<^M%%HR0Hp!qZ3*yBs-$-U?&c=j#Q`d5W-;xfo znAT#KWPb!}plU=@HMOxOacWF`#Re;pzYHtB@#Z80GI~Z58*`b1IjX{McwXu$ybZWta%~{0EXK9RYDsgB@G6O`7k~2~l zhT3#=W8zc|Qie`?qR{QEOEQCZM19T3(L^CVX>0vwbXAccEc%Vk;(pTs;Wi`Z?g^ZE z%C%jeowwq~!al8h>=t(KIqKGR#I^05rL!)hw!>wzCgT)xuer~KYn$cMoGn~>k7dRC z{cqB$)%tG$f68CK+O_{lAN>Ed7x~XR@4viy|7)MuDW+4ppAIQ7Iz2AwYocmnj zj`GOLLEx;$CX3qktiEun$qVwTu@rs94F?mFJKz-%Ec7Z@qi($Tw&<3)uwt}nTdn1f z>oHEWbcGWmNl=ikz0R~X3mXhd@S|hq#nyiN`JKzfTry3p0@g9uZ-k4 zo{-`e7I7Rb1zMTyJ_ONaoYt03s;tY(Lh_?Wi4MMMni`pQM3l?+pT`!~?-XCm!f5%t zU#dI6JUeW?jyvSWK*y?3ntPTzWu%%}?ILX7{|4pXHSXa8{47WRYeD*dR$CMNx8Mu~ zBTFL#N1OkHZ2ci;{2QOd8fuN$TqM@$&0HytvzE|ynXU_8NCCzV`2$a~tU3+2v|LSX zc#%ERL1+N^FRo>#7##e1;I4P3!`15h`EztOnE|2<`1pS4EKM!xS_BEls>!)~hTYj+ 
zUMIFM7^HzyMVw@++?^d=l2`ykE^^!10I}!95OK9avBUi0Qtd85A9SCTOOBO?gtKF{ z_MM;Xr5DT9-%RPi`h1YrxSBR+Y_m?8l8Yk&l7!`@cY5=b>Ri8ddV&chxjw+$t;HZ- zNf=Pgi*kOF6=PNzr6rZXr^gDlTNZH2pb=qts^6vR9$9)@*X|8CvZdUuZO-Z$J z6b<_Th=aVlv@Z&T@*5p7ZRqBLC}Iy9rxQmS*=tKrk$sCECZI7HY5%D_!&<@ZPXBfU z@SlA}1vlbZgJX{E8MfiOZVZ#X9f=v$v^`@C9t%C<8Wo^p`YhljMOahqf14lY8ueTx z{KWU47vTRdG5(LH-b-#=obU&cWt3;7Zy|}beT@$pX&O$+4=;oikn@M=UHrH#(X2eT zJ?{m8;hq!dtcU937q57S(&(Q7RoM-Zp8)4;Z+~^;`8$0erC2mJGVr?>KPYK{peCeY z2qC4hk=wYz0p?S?QTmIXmG4x{?xDh>`?@73x{%~(%#pBxMTR*k6$UX8i0a}Z0It;0_yShazsB+JH5p+Or!7`{Kx1Le6Slv!Jh2>ZpU$G<^G3S3L8<6 zoYG^cVw7Du2&0T6=Q*@t61lz6i(5=T+v~(!!5R%RtnHXVOvR}z0!!BUZw+W&RgwF@ z{@sp$l&$<;^;u6O+lMvp$Gn6 z0VUursh%Ql6JJ1KEbG%ok3#a5o6i6%N?m}~=I6(R!=P%(z~u<9COU-{7%>4PEttco z?9d}&)>10Z6*d0$XI@;cXu))bOI&Pmz}gm-e7?v?W1lac;WGv|C<_FOy{fIQ9#1q> z5?DBP>42L1ChBb9GAx?LSwI)+Lx<>@``mm6H>iw|6H6eaq|qE>w)7PSDh9Tk z2jG9nSelF&q|<&Pz5f3v(*MzNd&!QA1JNUK*ESlQn>Kbes-tR|3KfUb01G9Q;UTe7 zJ7II?3J;;D7NB$sKF|W=?)1UlODPdChZ!oWJtDXiT!5Upi1bEMIUOv>rSNQ$#3uy}|lEBs9b*zH% zwYs4n183@lGPJ}-xP$uE@aEj(@zo+ddt+2JY}(LzO@E8E&;-Sb8-VRYt)@W@njN2M z4nTY3z_F-=t8-`V8rq-Jy?=U>xxqDJ zPcX1`DK5t%&>Te`vs2O0=_NmOV+a)CIPE2|B@Nvj$T(;O4ew+H zdN8Sn4}+ON&*SuCT0|dfDrL!(JViDrj5IOCr8e$+xERMSo#7hO3@b|SPgu+xlGHKE z&ReAB#((!4+{Sojdha!#cshP&l57g-Frw9%f`CX`oCmVk`By9#I}Oo2e_|Q`kFjL> z=UZ3M#!27ONWjv}z~Wzq^hQ5~>Hk1wcj)YHCfvcL2)R^JD3M#0 zYO?UJVwwP>Mrf@r<9!9`@R^q`2=V6ND$ze9lxh*(?ro*sW-yvWeI8#|Z<86|R)c{i z3*|LrZMjzR zy5`(vwZa$+(DTZ@8^YIPKau$klC|0-e)DS#1HM-lJLc?Ld%~NMPTygghd~@$=d^+) zM048hxu*-*r4UCWT>wnH5wr6eS~Vl1ge&r6g4lOXA&++nkBSvz!=kHB1|`^NXQ5oD zZ~-ePXbmP7A!!IheebH$jiPg&T8m?zCzy4vH-Lpyb)=6b?vwZNMW#aBcsOG}jtW*X z7=c&PuEmf6ovU65xg)Zb2;=j_ujxm0# z@h2^C{?W#U;h)n&!pQBv3bX%LQpjn^{HPMSfi!rQtjNt9a}=#g0h_FG-;@pJDp<_K z%fSPl;jImUQR_HGU(}@3(;sqqb9l>6GXe6!d%aRxb&e<4UB18GKS9MhgoJh`56KkQ zqK7Fu(8#;RYOQ6n*WBR}yJrkBmX#{V3&^1HWm^)K=aLOB=a~c7j!UDl*%yj3kwj0c z!p5(GSg285Ms!C5fu{Qj3+hC7 zLiwty{=BiYaTJ%3Prq}uYvMK-brTaB-KoveGl0P0xY}&-j!!+9&SgNA_WxG9*r1S> 
z)J?3SH7%+o+0p)i49z6QG9$XWM)`$1A%TR2GZ)s52Dvx)Nm8DSr)07v_Ea2zG-)Q( zP~)u_>E-@Onn8=mWATzV#(sT|#zKqv`^;`Qos{+B`?0}M(@*L0tA^+v^hLVgvnY)} z$sxJ^IfL}Y*`oWVrRf{7_1gJ}Jo`70r%;y6^bn0hrRtOtS2{h`G+M_R?7@>wjh%lc zC(QDbbZr3Rcs)yI=55Leu{ON|pi#J6q*gdXBEr+oe~~+o=o0xVf06<2ACrOZpOZmW z&tA{c(#TT5MbFmu|2ZH2%UWUHx^y8i$vpg!Sbxw8k}Bucp9?{N$;@=1pkTY%$+UUZ zI^B!!Lyai!-`)y%%sexUkbvaAB$*yR_6iQu8SOeUJU;wb60Rh&S3yxmeEYtG-9NFkVoK`_{-NKA%~5d_R(XmuQMErm1OzK)a5hv_xT30ql$lj@?5k9LClqFoOUc%UM zoj^|wyQK=a&0Eq0b*nO%ePosQB>nOQ zzq*4L!+4epA)8Mz%ts?Hrt=c^9B6|dnjCu&ndqLONJJOj3l!%k)FkuaqiGW-EBN|G zM4};5oC=Ik>jHTheMB9)O|Sgc#UWMg*Rcc>tZS0S%o<-CKpaDWXOImOw8@i1BAs?n ztP!m~B6klZ>wP~g$qq#pK2(kPkb!^yH)3W#4Hrz}=OV-YN5l;MKPT$Hx^n>|OG|kp zYeOUZ|DCBn%vJ?VB_wYeB1?p%q~D^24an4khCePvm1}=ySsJa#sK+#@yB_N{diLT5 zE{Z^Oyh9kH_F20h1Ttq6X3>XXz3)WWd0brFAnEc)4WF+z9kPxye#)1-XWs5lU*(7$ z4d2{H_to3`Eym&wb(g#QvHRnMa?5Uc%o$ja!?yaejy&@+V?HoV)nu7Nh-frb@$7Q6l7M6H_lN)uzA8o&uXH^-OfrPa7AY^ciDIy9XVr|R`3T;!IpKC!HZ zKA49Q(Ne!~nwR* z$L?3xy8diyr=s=QLI9!VCV|e!kB>3Om{@gYu00vTqB1m_?n!A%?;;HSS$AtZ#qAea zFL#w#@0w(2Rg}#+2U7#tlPT0t;f(a;(<5o>HFgT8nl1WEX5W9?H&%vhO$DC967DMF z?n2?5S&3)qdzD{iAA<*>kh7VXg}XDIfM(j1LYd}@>(U_a0>Us!(|lGK#{&*Ue7cz2 zS>ng-$W^BMOkWv`BU_CpC1wB7YG$9+3%d3y;H64_h;-x{^e*y-sJc^{v`C?$qWi+j zEQ7|%kGo3<{g~2f6g~{{S$p&upmn6;?B2lXQE+MUqS?=#i^Wp-fHMZY#Bge(q@>um z5cF`#e7LEik#i1NhllBK)r%q*smYPK-g6!s)ZlP7Y)3}^7DvP@KNhVfKs@Iq@ zX*^&)l!7Ch-TMOVC9JsH?I|a_b>NqFZZQ7CFxZtH773$4mMYY0=98%M#tmy`wmF%ID(UI(c~SkJ`4<=WyJH) z?t$~=^`P&89+tZ0NNqwZX1X_QY1JiKw)vjK?3t=$bec--d|wy$b?uA0Y?GHA&9ff7 z)_TKHyBR3Gh;QFzI*b7Z%sZjllSO7t*uC@~HXrKVv)?4I)1@lcd!2aY;2Q@N(idFT zxjik6hOHs%2a;!MXhw>KkwYzpPEjnjHhI?`1C^-V24}U4s2+1Y{ZSfHlG0iwSnqam6=D^9LKiORjZJ4~^%YhX%sbZk zPlS85$CcAA;A?)4)t5k*eyD%X5~JOJ4H-xC5nDB&X~~IKvc7r$?X|Sd*aVuSvyo-> zrzb8|2VLY4nq(65Il+lVqjMpY*(Fz@{@{U(VW}PT2)kClVeaDJ`*#taqWc!j_&; zRC3Y)kJ?wRCR~vvaOSTv2I=^P8f3>qL(q?QT#*oMjF$&#v?9D%#&ULdCFkqT5Ayo3 zRt3>SQzttQu@Q1fVWv=8K%J_nj(s8jHW+h=Bd%s=F(a*R=fl6a-c@OU<5urGUc(rH 
z-J)5O5fMHBDVlhTc_Yj{o9L%hn_$$I6n5RdN*qf(dC_mA$vL}lEQ4-3q*NAE4w9JA zvWUUN80rUXK@cwH0bQkJ_6$~r^5P9fEtHsOf7e+;9ie=|DC>JTKKkfxuhf03C=>7O z&Vv@>`c;n$n$RX7>+_`B)x(*WDC(#&+LBmT@<9`1I$+{L7UAFGiy~i{1P9(Ed=!ux zrY*h`y6&%yyd&JUtd_?bke&dIeQNxG31$$Ob(ZvcIw*sUzcB>%ST6yxRe*WQFQSV9 zrk`pZaoA^JeG|-=S@8_3c?_fBl+`|jV9e234k(}pMG@ee2w|rv>wLopg7EtfkEwjT z9{5R?Z$mnRfwguCJ%7rXnS?pk5Oe_(;C4iVE(mtsx}7FBxq|8xDn#F5@(Db8#Lplu z=M<@Z*ZGIjrYi|$b8gZb>@ z{O_wr)GeA7MW_{;;vc%)Rl|lJ=~j}sneCQ{&k3Hcim_vl_~YAYv3k9D-n8WETV^^` zOFzq_t@ZHgr%e1+N~*r`F2HfIbP-B36JWS(_)sz5NJEnW_^BA8sB5lvC*43wK=7TI ztC!h9(^|eV!{kd|=@*<&f!b@dnG?b@hqV5HGxKu7%(1c9C+Ukq;TIe%s}FMk@BV87 zetk@$V$<(0%~{+5fx>4&Uk@%bHy2|(DuLW>- z9kDXj8@q3YS76qmJC}nF%jpVkVV(QwZs*$h@?zA0Li!|8%H$wh@fzZ~gws!~`&2rd z{{4x-haqlll=1h1osKvt>qv!>qE;jvL6mt6TSf`U2EqW+z7!?t^<+E?CkDsOqyTKp zQAY6HdcBO<$V8;t`O$*PyZ{YUQMTY7#2a!?X{YaIzaU~Z+!~V4XM*i+$bGZJ_At*@ z&4V^0faN%&=wn7#WjAj-vqDXjzT`evZ8ysaV*frZ`GX}o*6|l8)WS#AZ*J|GhVR5e zwQHzd+&))&8@KK<)v-GVaxQ6|VF~wmAAbyetVT8}nYdO5tUx`1UDRv7R27WeTqRF?%c+`jsbXT^hm|`EERFvb4z)zmeE1f~kIanaTF~6#VT`R6zsix% zM7OV6peUYvv;k_1LUXwguK~dUG%UhuC zFG|vm5NM4UadziTUfWlU<~G$R#aWhY{p#uu(xg!dFe>iw4TLeWmQd{&+TgBATD}aY zI1^tj+>@QB)I1)lrKeofFQUe+9*ikBYTnZl5Qs>l@gkTkC{H!k^Vz@MCiGC_k&rNp z%yIw}SnJr+e3~`y&}1YZPq&eIUNuX%HXeE8;pfz2Ni%Z)w<28X@Z4PT}r{% zGOqzW#2a5uQ3gH0zEq z&g(I5o)%PKEjREBi6~+~z-jGia|yy0!ys}a7P;$+4uK@os0n8lsw5cbrN)RwX3OV4 zvXRqzZ{;BqY#m6{20U^4kA%5odS$2vToR-+|OSQ zg{xnUTR}Xtr7BLWnOI!QD!C6`LI^yYtB`kG=*sPly>?Pgw~2lVSAaAkUoy(fmKa89VG=n^ev~2E-g)U>QQ+p6Dl8(~Tjz}!otH+3OffsV&#;k;uG|*}gk5Uv9 zKuOO#RI`j+GO@b_S9BCIfs0gj_(C`>(_!(atXo00N*TJULR77#bVU%Bx*3gEbp|YV z201gSNj}Gg6*QS$+x{_yEt z9)iK+;kJh9_QfOr7=K2`vX7Dxybjl!-qJq+17)iM1qvs^YJD8V&l5~HvCU?=5e)wU zfZ&Rj?}{WA!<%I&^zHUEH;os$O_-e70PYm_eoNEi;la8k5P81tar>TN%J}%j(}8<< z@CwJnH(;+$?FZ@A4oBI8j@QTNUE?1OTIbrrxOT(FqdeX1O4qv_@5w#7OvU3?W;ppd!$4J@a)m0r62LWOf) zY+qc*R(X~+SmFH@`7LIVH9JjO|I+dO+TiVtAAbBXef)SouI8D@;wiV?oixZAes}fM 
zaVLG(9-ie++8t#8-PjSjZ){!}i_sp&(Vq0ymbDMy35RW|SE`vPY+(tm0LvsDQ#)KN8rcl&T_o*z~my^KMrf>0(0FowE%&4luN4Y2^61fNTi z1IG+7c#~DV2+Wm`*7M^g&kV4KROv|j3Hn0kFhZz0t=t8Jn|thyB=v$uV}jcaNoor% zz4!a+s(ClQ=EMYs+bT}mnYHtr&FjKDBkvQ3u+OB0VNfS2mNAl6|2ORplSVY@je{pj zcP~15e|#N;%qWMaG=Zfst%)Zxfkf4an){DWn|WzLCAFtg*^9F9f=5A`&EY_+^3qLY z7L`TlfyWbmRf63{$(TIsq`1^-KNyHKUXECy>rsu!GYCu3v}?!m`#XeOC;WB1LkLA( zDIBX+ft-#e;$vv^WVXar@MXt0u>sTJ63|_YdXI!*55EA2tLhE%^e94;j$ z8n=6tJ>zRXsNKh#-3qShu5|RN>YWD1iP`Jhb2hD8`P;}{K%OL`9}qe*RK_A7+U@lh z#c^jB>sUu8Tj!Y%k&q*JXpVi{v&_9^^U`$j<(L>7U($Ut+IcG9=IsS%*eRQzRd!d+ z&`;)6gcAp!wI1Rox!&Y%7mzF~z>SI35KF+PM8j+}$7a~7yDJ`GbBjo1t9%VX_UajC zaji0vIE@diB87IFJX{`NM?a9mZV4@O@s4*%7Zzmf(BDtWUprx|^Wrxhpt>JmYS&R4 za>ymF^fYTHC z>RB6E{%>#GK*i60jm5ZO5m5Hj#d9ia8-$=)a08i?Er^9Q@)}Eh2}0#c#94RwQIfTc zT?9(^ucz3&n;#9{RM-f6YxDP~zvQBhtq2z4iY5X&ue0Q*Pn5#? z(H0bLZVZ<3yV%xt(eV)LM`$TV!d_5}FgT1eg{+t&(NbZbnaibEXX@IuHqMEe9z|`X zGn_8oZPqbZ0Y0)U72Z*d2pUi0nDA|-sq_)}`8opeGvM^(N_b>Wq%Ym#my;T-=$Y}g zf?fs*>P$OVO`^!TP?Vg8qUjyA#vlXV8vFx%PX->)%yO!je9EWOI*J~<-tkgFywC^j za)J%{xPF1PO_RI0!mPduaWY1Aof)2x5u_2XBB|7K^Szj=n7}1hV|98&-G5&&9DBao z-isP~ZLY|T(*TNK)K_uhyN+vId@)9S#0 z&PYKz8!NXvE+&)qCs1zdgCJrnXo_~m${?mfE(;q9h84lI(1pnandu2hufnb)m82_G zg{H?0Nfn$|Wr3=`p3waAT6*{+v)op-Nf^X09r@a7x~OXQ@@>3u8#|osTYJxFwGDoB zm)Z!Am=}QTT@lZy?2}I11)oMGgZ3|PhR$FJWVB%a=Uae$L%BhMq?J?&vfbZ21;Z?3D0gTSI2hkXc(IHmp&e7%v$mY z7QvJP;(6&5gtRS4Ws-nN!1OW&bZWzTgb`_+To<*Al2djNmoQ?ig>wB0xQIiox6^QC zfhs-9w!RfVD-4X86~T=rM9;)H#^SrxT>KJ8PLL&XEnz1SbV25TgkBoJJ$iNP0ozGS z@1X~xI&R8(1+qt>@_Rx{H<3yZmJ?{A@B0-Lg+M@>QQO?oxZ?f&H)m6IVG)Uut)w z`_Fa?XfN)}LR*dq(wl;wBlELFXn(f;DKh^Ei-~l|D9+<_V0tP(6fU6*>S{+95P~;d z>hmsB^?tOWezd;%fI5P2egk=7_n^rKq-hZKNAoLTQl<<6j);2KB)N8Hm(iLdN3dxL z9M`X!*&9EIdxG1XP+BOxI6|Y|8`okcNlGAvSG3*DSRNuVia81a+yJGyO3Sa%KO}_J z$5;mAOYwvlN5mS_ejvcK7|y*|Q>G)iDhwBe_Hv0Xj(?(tnD)r*t} z5`o&?;34Pw^FD6MDoCRCqYR=o*b|p28TzQS7uh~N4r@HMd@CDlL@(7eV--z~+560G zm2!G%Lo=R<`UTT|rH8ZoE1L8HtTVZSpKF=;-l6wn+7?0FnM=masIlNWo!D~W6 
z3AomxP_CGaA|6+My|6rEXBIauoYK3S*_^gS23zN8Sh;^li6P#z-X5}1dy8kP4nF5* z!VXHK)M81{4if~6*J=*MnXI`W*Ir9{R%#FA=4bj|&%$-XS6@@@jYQTxv-JJ4JZ?_M zagkjTi01qRqdzRf%EgY}BQs#iL5|)<;~p{jVv~Mpw1X>q*(14C6?*5a)w-Qp;yJDM!UB%yy#8DE*5nkDY5%Y z*f!)TSWnhGmY3bnIkZL#-%tg7Z8+c@qxTJ@_y+X1XU?{=ZK7|Ne=`~4*BN$J<^9it z1$VZ+%MFhTfR9sR`EQ>`fKQgUa#(UmhUM%V`24_>+vpeI?6@TE4WFQCu`xEQuAXGe zWt^e6FgLRZ`^oAT3(aF4gk~6t7nEF|e?!9q>TFqKf1Kmz2jn~if zPpBvA7@AJhD!h*ZK8h;So#+Ax2oDm*SDnWfUQ=Di8Kd3bpO3(eD}KD_ZAJA#M*)}Y zlPP27yKK#ws>%*8CFZu9KtpgT%Cu$4cPP90SKd~=j8>(Lo>L7pqT40LoEtkyppQ_G zr!9l*0u~Lxe=K1bsAPuYl1k6r(v=EXUJ3bylg8%=jl<5NEb_whk+1Gl(XN(GTxkWY zYMzkn0OWao{IB=NWXTZAsP=L{X$hLlJTe^bG3*^?&Pgw}g zRuE@1S7rE>Fs6(fHO%ujIsW$0FtEqeD&(r6i%^u}2w^FV^+mnVdzbpAO|6XPw@l4c zSz%@>WMffzzaVDfIxsDeD|$JCl7u+iZ}R*e!CrQJn*o?tMs|p8Rcac05!A5nz-Ylp zdRXzvez~K29I9;N+|GoOk`M9LVsxEot#F{# z&J{;vD00lY^DE0UVmTLdjN}rbc1Y5gG76HaT5A)ZAtfnxn?u*^APjlZt(Nt{-IPs) zc+@R;=|LZ(u2j>!5MdY?63bsy z`XC9|XU|Xx`G$_AwvyLTW--=vT+yb7{T+zJR$%VFM6mj#w3uV*GSXDKQ4$?UIW0j} zzx!%ggyx<#9opL;si;YUr;k34p}*T7$c6Ce4)mCLcp#Nl)PlIrf&AMgQ#s7IBEvTvspK^m9MD0EX^&($dvG*#GF$ywfM;Si&M!O% z#;P2t64YkCBy)0jrBN|}2o3(YoBGno|Kt>P#KdHxwb+OZLrMGC#tj)JFe_|LqCVp)<|oam;+5dBVXF;u&sgv&KT zV_e)&YVw&A-M^AnkA;K9MPL^V*aPDgN5GzDiUo+G0 zOQG6g&XFOazBf_)MiZ;0(aL>#Ky*zM7#x| zdMoN@bF^3`!a>j&Bk)> zNz}RdxH|z(yM|50Ci0zdC{LV>@&c`_r6=gTF*v z#X62_B)9;pe>)%FV6b1vggkP=@C@kbd9l%D^oPDPM9IBf#?xcxo93MifVtaZeX{vx zI&uUoo4G)lz~uuV7bBC|qZ3~aos6137o_)!jWs4^NewXIjO|+Llvv}~(8#_X-m-<- zrEMW1SZi$!1|r%}kPr(Y#)-y~5}r9Che;ZR)eG81ZB6rn29l5$(eKJ*#j`f?(a)on zRr19WKuBMkiPaKZ*M<1?lKr}l0V7%kTwp*C)CTT_fUEeWr@s`)Cd1NK#_2OJ&GZ1P zE+BJul}E4GP903?vwz0zT>Td}2LqT2VzI~tRBHsq&Z%8fTfN)gg?QP+b^8!voLQE( z#^%gmc(~z^Y-m9@qV42 zM^y3t=<8b`H}_jY@4RB&6;+~9*H)rQ1lfSs`hdGV)DQJuyV#^JhGC2_sgJLycKKM0 zx!m7Aa68y;I4}Pe%5Yb&%M$+n^{et{?(jcLJMjP8M&n!7G9FjgI___qgOit;W)B>@D zUQ)4z?)~)-(zOIN!9RXswEqBk{ma%|rcnvt4jk;?*4B2`^}f}9tu%T3{eGelFyo5} zv6MN2*%sd!9JMFc#JGs40&Z*kAU07jh40$>Hwyf@U>fjPFQoV2FQn)`-^r%p 
z#`RBft%9rHvGDhNbw%?Yb9sHWu|)GQ+d*i=R2ZDn4qa#A!5RUmKn3+-^kjjr=oSsm zr`s0kVg}@(LQjX$PWRL18S_nwJtjOyp&W+6gaaR|!Pd$Kr39G_y#h^?OoF6(oq?LH zQ+H%>IKJmW0^bQI3Y^6qAvfou;L1&cgd5F3p0_HDxWlCXH8$tm+g-#Y1tfGK(9107F(6n#gP2U;p&a4U5Fs#5-q+wcG$1$6cZM9f8f$C>2Yw4O-qrx>@R9u0k9<3d@4-7 zmFhiiSWp{{xn0R=dLXx}NstN&OSLZ@*-%gG{f!H!C@wFrFym;_wLrJ1twOLu6|c4! zr>evsw3qi}G)jDHAQ#c;8p%Z74Iqak!Z=<+SHdTzg;y8*DM{3d%3HkV4?!Qq+Uh(e z6g`{eks#vaA*-MU0DVPb&c|r!u0O8Txt1U{J)rj5)IMp|YoQ^|Ql+z}0-iC|oAP{g|FaH6i3?sa3V%-{3PpXmo%cMvK~SQwq;`{h|WCDG96Awi^K_GZ{Mz z(t$gCed7U==}p541m?twQ8Zt2Y^-%@Hh=iCzZbbNT45(~eE`8hX!L2K zDY*lrsZR!Vd5}_bl2yL-7(8GCa+~Rre366G@Yk;;T{9QlPCJ}3-lDIak-nc)DV=s) zGc_tU*zzLXZ@Oc>(%VlaaIrhoy`DSYZu4nV{LWk?(9D^KQk;!m|EQ9@QYV=;H@%8A z6A(MnGG8x-k^t)#2jpf{Bh~Y6i8!0G<5>Jvv;Q$sb+Ehq)j3UNn}Mf&ttzMO62pAB zI>#D-9A??B#Tx)xqJOARPeJ1ykVP}D1@pDpcgeTWP=S2*JhTg5spF*J=GQjMtwp5; zc={~hh#Vz)J#7y3B~CVe6?}QwIt}5XHs2c-MaOOwu?6KQ#zPn2^K5nz!Gk$&C&u%9 z9pClc?6b!SIIR8hzC;T6s}W=VvV1`D%`E~C4lN4Kmd|N*$gAyp65vyY5E0Y%(CpWh zZ3(44z`DY2nkWEhd%o3Wsi_D+wDX|3zD;hGfkeq)!VEN+ZhuoL!!8mQACkNU!GWt< zm+p@a%dO+Z!4H7UhIo=-tA=)LrxpI|Ym*I}9ZS@2o{->LM2}iX?hl@U^VA!?6{F*K zx`<(;fx#2-sIddnj28U8tA2XPnSAprZv>sfB8=IHYCEljB>5MP;}=S>?Z0QQ-yq#! zkomjOi2J#ZBGCc7{H|OJLo+x!Iz0-6o4Xbxk)7yiv^}R$+C6KE4-{E) zw%F57!}7I?2Nc>A{wW_xehxa`wF#+G6QLto>|W(2<<|VX`?i0-a0`ZkK3x7TR|*K> zM8Dxg&$Tno?aPgJM{I-MT6qTJEHlP$4?dsRL0(^P4^uw4Vsd*U^XBMym?;@*x=vt* zAKM8lW62VGZuk`J|K7uXc-Rp-%$sf&uRkQ6lwd4 zv$8$1l|Nh`0A_<72?jI;? 
zo5r^0KRhw9J*zhMBm8Wo#8dueql@Ne*JES&*b`3PXmz*t5P){hrDv}4wY@FKT4iaO zetAW-W%`i&YT;+frs@X?Meb=Ko;NcDdfF4*^=dm9+^V^fI-ibPzFd@=_dKsta?A9+ zID@|;ga2CKnqr?9F5~FXCzJ7b-B6$Ra!$hncO)Ajew>MNM}4cqsbG`7fP@J6T+>eYWe zc5~gmfS|DFVBQzpQ!BWPUvX^x+rvCuAR}SZ^JAC-bN>0<7azAX~s5|h3+ii392Xgt<%ys>73)rz2 zM6oj~>ZoQr0TQ9Vg$X>_xH`ZP0tl!(!WAZ<1%n^I2GUROD) zpCAw29~J_+8NeS}4ZF*6F$+%tPhJ&=)GkJXucX*wkh!FUrL432&b`j=(vRFvZkv1q zzugs8O}_7zTwXW2>V5x)^3e2_8sa9S2w;*>89T}qFzK;V67*9 z@dY&Ho_U%RQPN~al7L0(+c}Lcp*b~cI-PSgE#VgZFX+_|hiN8q=a8$5id1UB-X@CV zoY+X4EKG7+7xrlB2sZK$X#2t}tw=`Vx<$ld&Q3KSWuDwTd3dlJ3Oy7fNn%hC0F@S2 z8qOYn=>BfI+?P(lxIgg$1Xeqj5y6ue#N-zz70+6HT>U~;7CFSjFdqWF!C^Hopb>HB zTcOaM>`SF%YP5sC0jbfBjwdI>tdz@lt7@%weWpASLqQt;jlWm#<~$ zC>kzwgt%$r`G*5`L9d=ZQg0tAw;${sbJv%N4$D+!OBTJv+seQS4sxrYs8sA{}}t?}+xp<6_AQPB{lyjJcIfm4=J+4mj?SM0y9 zdWL);WOM+gM_-spKA;_atLp>8E{@HiL5!qK`e02UDe?Rm6+BO`txYn?sDb#s%5n>< z-vraelU3$CY=%X~7BKg%&97YwQYU0;le$9Byfue>rf$1%<*1bPog~rw%O4W?u1*>= zclQpIgC9Ncf2K;P4z|=dmsBN0a?Cf(KyYeE-p|nKku}H5#)!*Wy2nU+uO3^6F38o1 z`Sx56#x*d%wyAHZz$1}lS1hfYC}Q8(swequ^q*y4c~3=)-$r1&!~pR_xOnch=C&^q~;p<2qyFsxuLu{ScA$Ad|bYNA@QD6Y_fT z@lNvk4L#rYD%aDY$!U@vapkO14E4Tuw@(lc zSWiXqTvE2f`<|s-+jesqf;OE%dsZm_&;j+XAc6Dm=vjWSwSF>q2x)}+^GiA#t?(>4 zNL?F#LEUk)^b!n*Dlukt!uv?=!3C@Mw~2GF*`ftQlr`NYb%*e%w3mI?F5q)6je9`n zpc}=t8@Y?~F#6eH%P&GIZ=>JMtog+*se1L~5Y+IJwpipui(jUH^inM)v&Ak?Hi_>T zZ@8=C+iS{O{v2Kh(!<}qUOP^DfV%wxbKJ?5?+E5c?+>|O;F%2#YpJ(~$h2GWF*GOW zCx|rh*1;XVr{;_Z1?u=a%kO*U9ufN@a0!pm9^w@)GXHUA?v&sg+1}n>%I1OW@=94o z)>y=VAr53+jqKu7Z2i|KQQMAc`U4F(qs zdMTRI&nEK5m@zMHx`6d(mg@bkk}GD?^K$lZnjaY>iaHINNo)^AqSfcmCO2t|6unZs zO(e(cCKN|E+9lEN^pr@d#pXJRQ{ZR)q)MO5!P(PC&XuO0nbX%iAy41Jb#+W8WpV+`FkE zV)6cunpU%v_0O-jdG}~vJybVrBq-OoDScd4>$x$!{zzHMy<4_NTW=mqi8505Jx#J5 zuKXG|V}JtD{sVW)_1Qjef79C~NdfwPU1EM|Dm9Er&MNmagFNrX5O;EHE1x)}Vnw#P zI#Oe1Iy!cAh+0aqzw}ffEs4Omg>i1s#(94c&DzV2pKtsSwz^)zvPvQ{ty1km@woBu z@n@RD!O786EBIG=E6OVmY&dkjqh!$XNN;E~A+ua07;T=UXT}|i2;yNj!IjB5Sj7_D zM=@8r;T$|IJg!;S;*ILqM74`bWBFBhR7uhvu>UWIa 
z>+JRS?imHw{F2$A&x&@zGEVEs6C_+Dl4xJYnudVUiukDvCd!SBz zazRkbmh?)z$vUpdx(P>yWgUOTQ(ax~Z%>O8VRDHd zZo<}TNQK6`i!Rfyo@ajdLQ*NxFJd{*km^P=d)=bi;kxgEJ=}ELWarBRVQZ|F0mcwb z$43PfeFGZ;>^~NM{QP!6jH0mr3`j}_j_j3`(4!|M=rEZyqzcI1eG1f9E{RJAY)uUko7NQ2L}d;X|$q3W}4uA}=GPr!aMu9Bl~syoUfMNJN}_-_n|RKWojf3sW$gXe^weQvh(*hj)}TzFlf}Xq+>{R zOSWccXvp*0JZ$Xs;G+yFD$8`Cl?W162bS!VJ3@bvO_%&XnYx3B8OZJ!q`Q zJi$V~AXsivSJQ$0V@~6F4?XS@iC=55DH^(MDfX5}p}%JIfSoWj+~Tw19hdVFm2x77 zT(oi8yYCiOpSCN8*_t=O62$rnsYTBXnF(JFyE7>ryqz@tDlD?OnVVP2!X2_H9~Uhiu9l@g z8YU?eA8USYCf5nx7%O#j_Fm6J(i*&(7^MaIg5%8$l78;BybTirfrXXJGkm(9>_23Uk80+B`jVIO2*1tS zaF8(%G3>o8w11@A?wl5nbzrJ0iL`v!Su4W#K?;{yjEA(PUpH0-r(HY65S6x0=wC%e z?ByO6D;Z+zD($;Q$#D1`vsblQ0Y&cC*$%p^LXd%aH`( zn8D(%!mWCF-eyFOgX6dz&RG51j`$CWjMIeA4Liil9md=|n~T-#vv2bqb-L(KR+~Zp z?LB!+=CF;ngW2gjlT^3gR)=yje0xt{RhHW=@t8Vvz_!>sKDF=rSNqm!cd{|6sXE^U z=Nd_wj*tu1>vc0#5vt1v*D@>Dqo3VfyqPs#GpO+_IqyxT;9HN%5zXfT&yqVYwS9Td z*&``D|BdN7Pg={8u>Yv>?c=kJ6uPSJ>2kqa5O4EkSq(p1qDW%>aw<*uaZQWHt|;*? z;u+@xt78$tz1KHcd|H%n87T^NO*5n0qAypg+*%X8&N}f!2LdzXNC*pjP0RUN3{Fo+ z)%WGuI*-x$ksGG>jhO}R_smD$ZAh=>RemE38(h6l@x-a`yO#eaLW>hQYj|`st&^V< z`=X-~3I-{LP8@aA>-Z$LHetP(T%dHkud#)Xv$R%|rGA>0{iVFzOFoY@f3g;@{E2Cj z6#aI|c*4!-mBdp4$z8BV`!{3d@c5Y5wfH8vW`-a~9QqwAy`sMe4v8K!v$QxqzCA-j z9SKpDybz0XjxM)f^qY@GDdh?QdsM}>DEiVPk4{?B9Kz0BK3+?@T4qCh*5a7Z_EYrUZlGh})DeVeR^o0-_7sahvR zE~KQ!C`C!xxt~fqvrk&gl+B>?J}~tb=tI^W z=0DgIXTz*lpv0Htn?=h+%L2-}1ll{xwq*{hP=APaNL#GY82Ik}nM4dO-frXe2G`|_ zS0YsG7WHwc%2%K-VwEdw9=<##B>Dh))J<}rhjfwY6yro&qt|u( zl`5%BW8-M^R7;Ms6P1GJ7NCuLw%dalyf&^Ox}HNmC-!^ zK1^_Fg z&?@1XPxLPspRG1uX$l+5s*b1Jimn)X8N|L^k~vWGNc>WPsLGMhE@2)2(T?}S&THW$ zS#KV5K7GKc5IvI>Fac*UAIZ|oPvD=;eoHeb(Y&EJM%P%Au(&02{rI&=Mvu*uhKOeZ zY_cqjLR>>r*t>zdHD}Z8|(T@<5zgm7_r1Le+fxX5!b+DmOUp z=zYRTn|72hKkQB1;z7Vk^Gem^w7@cs>0J#0?RagCmDiNhRQ*$rXeO%9F*DTL4*%fT zFQ<5~ViB(^o^5Jy;%0gup1N9UgQ;HK?O4Ydw%q$O2bXkHueYpmx7w2Bn1nhkt~xg9 z6gAln%73EFW(knnZq5ISCu_>K>ZLldKUC({TFrpZ_Ql$4G51_isCcX=t`Wy;UUofE z;^4HA@657FYp?VDUH1VGhrtJq@UVlzt)Wl^zd0P9 
zAKMV|T97Ot-N%gvCZOK;Di$yB+okeY|9uMWYAKwZY-OL`@V+ICy*!p$^xon2ZFP39 z+&jT_H$J2n2`AU<+6fLw7tV7LeolMQVDZgH>?RSZMW#-8VT4PR!gs628;)D&1HRkV z!AV|9pD5FJadQ1s&-~#VUj&srEQr$Vn6p7UG0AXwx7KWKm4$)3jLq1+(619)maex( zm#f4s5h#;g9pt7okUW&@@+!~qE-#`hDDSG#!GqoguSjTFU5KEQm&{JkhIQ4MtA2KL z;|hPQ&zrm9meduwIP4i$I@QoA#z(NtC=@e&TjnbV{yZT(v#lm$O4l@y${>)_?B>hN zV*-~ZiRNqH+!s4@-L6X*u4m**M!*Px5)=+Q*H%>b9EEu z*PHsyZ+?p)UHBgKA>v+&X@p=^C4(=;tM?4Kw9`GL3q2xE$3z0%rKR6Ur>+ipexoM& z%6D~CXOU(8=nabXEB5RIp=F$96f5PUCdPwx^fa%HH_wmMAr}b?+D@;`gR?g*z>b{@ z>bQ86IAGfjD35DUmkEJ0{VrrUTk^=qbIOhx>i9-PXA*%_PQ~4p!KtLYY zL5cjIe+wsg`djUsyr!y(vi2qZb1FMu?1HvF3BK0-U63!n1D=1aR_?6RhE|98PZbc5)V&DH%73P=V7y7O7FV8}@@tfe$$q4XY zGQk-f$YU@66&CQ9U#9-m#`0GjT@gw!h@GR=pPv%^lHwoCr}7=t6~F>^!M4DjWo)qD zhXbO+23&g+H9!aID6UDI)Qh0f8-Swz#{o||z*sv`8jeH94(su5EaRLN0syMU2MRzhTxHww5K;du&$nPVy(8aL~D1iF{Gvxv^F`$U^7)ZAh z^>2=j&E)4uYK4WsIRZ6cLpW;tdDCEL31ZeUhs61e5fDs#mtY&mvBI!4pJ|qXbunNS z>|$jBov|qS`z%3pgB5AMo(Kl4d%IYfOjudJPpjL-+UBXz1RP3WG*shXWx>i4#cVHp z=e3GZjB=5&V`Yh9W+|284+jF){ardvb6{nOV`gb7GpODIEM!ol>R8Q%l_i0hRT5Fu zW&l{iyI4UySXq*oS#-8S4p%|&cuL{m0EgO9_(2wh*(WfQEX`vTkz;HL=vRPBJIcK3 zAJUEiM&29&y9z}{lQwcYEVf`uT?_ki84x8wFae|QC?_@kn~GV|u$u{^Ho$gJ;BTms zzCDkDsqAhJbwYqq5h<)91P;YajZXeJ(SvLe&?B+#kP`A!~GWtXNmtx4n;ZPSWmOB&fn?eKR z>I5GU5jzUIB{mXhUlj<<4r-z7;)2b7qXQQ$%7N|jLAr*_^8S@LtKD2hM|&rT3k?1X z?COHWZ!G3d2_O@`I`CosM+8q9w!5KVBtw8hnjzq%NX%B`vU>A15mc@OA379Vu-$GP zcJIS~`@t{+xH1DsGf_J%m_23=_R#PV>8TL{cIyXrLsjuI95V~q6j&`5PW9kQFHlVf zG%uUO?4*AYWxdX@X9V zy?B@#v>=i##2rXy52Ry8p(XfWrtR6Gj#GHl@Pbs<8ua$4*@?-mJ#d(1`|6T#dKk~nndW20X>@mL;DV0+6OfM^MOECpXgEbX2poO^g0Mz1 z6&=Buh{%)6cVlmrDD%}RZhVUaI6`1WCKRmx12inu-Q3RA0&2nU-dE+VAC+p)uk1;0#S{N^>q4c|Qi65%6I~ku9Pejic@80yYn^p`68uuBry_>(|ym zO>$sES`=Jp1xDO%7e>0s8H>zrWV($Dv_l=yq^i*|2#A9P#N}sV@t-YW5Zp-q9rW<0 zz{~`-C63jiaew+82;!fSw^!>n!N1N%0abzT;xM78Y~6~9i9L9cnk!~_&K&}pH-Qfe z3QeUA0}XU9*gJb9ueLXA@j2j}+{Mdm$H3dug2-w9?0fy>4ZyPoJXF6c>ph+Vk;mwVwbOdXN9l({OR 
zV$d`5fFTWa)JW^z1BY2vyhp7ACkb$HzLTOmNa`RuOwrL5xkK;gaS51ddi7>B5TKP`Zml)J0!u7a~rtF6fhhc3fb^8 zI{CM?Kx83@Fic4g(nH`plqp7_39f^Nl|mt&n*R^t?-wAAvHREW=r+mT_e8oCxqdR2?7f;_SKVhJ>BY9W4q6 z`niK>LY-si;NhYP1tJvT3fF)+V3_wHS$Q?-)d|3(2>}mvgcLY{&O#1lXa891Km2;c zTB?l?$Y%y}Qq;u)27>>EyW8uK(q@m!n0Vwp}$moW<1vI-DUeeV|eUNj7Am`LHwO%L)_0^HIr z9E5T&oDgQ))%U|smjK+>E}S*>UN|g6+bI9+f3fpi?u6hBReVPO=h zY(3M5&*1=~Mxl+I(8FJurz$OITsnEyEm zcv_|33x{Ea6p5pdZD`B_(iQEJj*(?A9EL?wB<{Fdm?|GYA=i^or8~m57Y@TZDH7*7 z(d-YrA14)jP;p@#d*LuFl_GH!A5N!n1L=@!`%Ead`^32i4vX8}rf1d)2DF}CGziZg zG%Q{hClS_!bQ9#2P^hv=oY;eg#p&)*byy;OxOf+hQeY1n7N5()o8t%qw31!41;IUN zSX}P)zB><)%OSD5XiXrc#~Q9!JdWyROd?V?!R(^L7{`CNi_m`m4cOJwpghEe-?@9l;M6v7A z33U9h%^A^L&VUH zu=6khRG+Q~nI8rt{1qFs8Fu~*j~PX1VUU&Y=Aj)(uv0wbzKfk3=$TM(su_Q9*ni1~ zPTRRr40Vn-nu|vJm*MT!6S=c&=T0K97P6x#72~4qGrOR^A50u@aNNK@8{iubrC@Bp F`9DH82$KK+ diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/setup-rabbit-test.sh b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/setup-rabbit-test.sh deleted file mode 100755 index 9b2708a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/setup-rabbit-test.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -e -sh -e `dirname $0`/rabbit-test.sh "`dirname $0`/../../rabbitmq-server/scripts/rabbitmqctl -n rabbit-test" diff --git a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/setup-rabbit-test.sh b/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/setup-rabbit-test.sh deleted file mode 100644 index 9b2708a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-mqtt/test/src/com/rabbitmq/mqtt/test/setup-rabbit-test.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -e -sh -e `dirname $0`/rabbit-test.sh "`dirname $0`/../../rabbitmq-server/scripts/rabbitmqctl -n rabbit-test" diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git 
a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/Makefile b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/package.mk b/rabbitmq-server/plugins-src/rabbitmq-shovel-management/package.mk deleted file mode 100644 index 6c3bac2..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-shovel-management/package.mk +++ /dev/null @@ -1,9 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-management rabbitmq-shovel -WITH_BROKER_TEST_COMMANDS:=rabbit_shovel_mgmt_test_all:all_tests() -WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/etc/rabbit-test - -CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f) -define construct_app_commands - cp -r $(PACKAGE_DIR)/priv $(APP_DIR) -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-shovel/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/Makefile b/rabbitmq-server/plugins-src/rabbitmq-shovel/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-shovel/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/README b/rabbitmq-server/plugins-src/rabbitmq-shovel/README deleted file mode 100644 index 1d7d1b0..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-shovel/README +++ /dev/null @@ -1,4 +0,0 @@ -Generic build instructions are at: - http://www.rabbitmq.com/plugin-development.html - -See the http://www.rabbitmq.com/shovel.html page for full instructions. 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/generate_deps b/rabbitmq-server/plugins-src/rabbitmq-shovel/generate_deps deleted file mode 100644 index 29587b5..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-shovel/generate_deps +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- --mode(compile). - -main([IncludeDir, ErlDir, EbinDir, TargetFile]) -> - ErlDirContents = filelib:wildcard("*.erl", ErlDir), - ErlFiles = [filename:join(ErlDir, FileName) || FileName <- ErlDirContents], - Modules = sets:from_list( - [list_to_atom(filename:basename(FileName, ".erl")) || - FileName <- ErlDirContents]), - Headers = sets:from_list( - [filename:join(IncludeDir, FileName) || - FileName <- filelib:wildcard("*.hrl", IncludeDir)]), - Deps = lists:foldl( - fun (Path, Deps1) -> - dict:store(Path, detect_deps(IncludeDir, EbinDir, - Modules, Headers, Path), - Deps1) - end, dict:new(), ErlFiles), - {ok, Hdl} = file:open(TargetFile, [write, delayed_write]), - dict:fold( - fun (_Path, [], ok) -> - ok; - (Path, Dep, ok) -> - Module = filename:basename(Path, ".erl"), - ok = file:write(Hdl, [EbinDir, "/", Module, ".beam: ", - Path]), - ok = sets:fold(fun (E, ok) -> file:write(Hdl, [" ", E]) end, - ok, Dep), - file:write(Hdl, ["\n"]) - end, ok, Deps), - ok = file:write(Hdl, [TargetFile, ": ", escript:script_name(), "\n"]), - ok = file:sync(Hdl), - ok = file:close(Hdl). 
- -detect_deps(IncludeDir, EbinDir, Modules, Headers, Path) -> - {ok, Forms} = epp:parse_file(Path, [IncludeDir], [{use_specs, true}]), - lists:foldl( - fun ({attribute, _LineNumber, Attribute, Behaviour}, Deps) - when Attribute =:= behaviour orelse Attribute =:= behavior -> - case sets:is_element(Behaviour, Modules) of - true -> sets:add_element( - [EbinDir, "/", atom_to_list(Behaviour), ".beam"], - Deps); - false -> Deps - end; - ({attribute, _LineNumber, file, {FileName, _LineNumber1}}, Deps) -> - case sets:is_element(FileName, Headers) of - true -> sets:add_element(FileName, Deps); - false -> Deps - end; - (_Form, Deps) -> - Deps - end, sets:new(), Forms). diff --git a/rabbitmq-server/plugins-src/rabbitmq-shovel/package.mk b/rabbitmq-server/plugins-src/rabbitmq-shovel/package.mk deleted file mode 100644 index 6cf8254..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-shovel/package.mk +++ /dev/null @@ -1,3 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-erlang-client -WITH_BROKER_TEST_COMMANDS:=rabbit_shovel_test_all:all_tests() diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-stomp/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/.travis.yml b/rabbitmq-server/plugins-src/rabbitmq-stomp/.travis.yml deleted file mode 100644 index 467bda0..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-stomp/.travis.yml +++ /dev/null @@ -1,32 +0,0 @@ -sudo: false -language: erlang -notifications: - email: - - alerts@rabbitmq.com -addons: - apt: - packages: - - xsltproc -otp_release: - - R16B03-1 - - "17.5" - - "18.0" -install: - - if [ ! 
-d "$HOME/rabbitmq-public-umbrella/.git" ]; then git clone https://github.com/rabbitmq/rabbitmq-public-umbrella.git $HOME/rabbitmq-public-umbrella; fi - - cd $HOME/rabbitmq-public-umbrella - - make co - - make up -before_script: - - IFS="/" read -a PARTS <<< "$TRAVIS_REPO_SLUG" - - export TEST_DIR=$HOME/rabbitmq-public-umbrella/${PARTS[1]} - - rm -rf ${TEST_DIR} - - cp -r ${TRAVIS_BUILD_DIR} ${TEST_DIR} - - cd ${TEST_DIR} -script: make test -before_cache: - - rm -rf ${TEST_DIR} - - cd $HOME -cache: - apt: true - directories: - - $HOME/rabbitmq-public-umbrella diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/Makefile b/rabbitmq-server/plugins-src/rabbitmq-stomp/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-stomp/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/pika/Makefile b/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/pika/Makefile deleted file mode 100644 index b082bb5..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/pika/Makefile +++ /dev/null @@ -1,27 +0,0 @@ -UPSTREAM_GIT=https://github.com/pika/pika.git -REVISION=0.9.14 - -LIB_DIR=pika -CHECKOUT_DIR=pika-git - -TARGETS=$(LIB_DIR) - -all: $(TARGETS) - -clean: - rm -rf $(LIB_DIR) - -distclean: clean - rm -rf $(CHECKOUT_DIR) - -$(LIB_DIR) : $(CHECKOUT_DIR) - rm -rf $@ - cp -R $< $@ - -$(CHECKOUT_DIR): - git clone $(UPSTREAM_GIT) $@ - (cd $@ && git checkout $(REVISION)) || rm -rf $@ - -echo-revision: - @echo $(REVISION) - diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/stomppy/Makefile b/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/stomppy/Makefile deleted file mode 100644 index 40f5bd1..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-stomp/deps/stomppy/Makefile +++ /dev/null @@ -1,27 +0,0 @@ -UPSTREAM_GIT=https://github.com/jasonrbriggs/stomp.py.git -REVISION=v4.0.16 - -LIB_DIR=stomppy -CHECKOUT_DIR=stomppy-git - -TARGETS=$(LIB_DIR) - -all: 
$(TARGETS) - -clean: - rm -rf $(LIB_DIR) - -distclean: clean - rm -rf $(CHECKOUT_DIR) - -$(LIB_DIR) : $(CHECKOUT_DIR) - rm -rf $@ - cp -R $< $@ - -$(CHECKOUT_DIR): - git clone $(UPSTREAM_GIT) $@ - (cd $@ && git checkout $(REVISION)) || rm -rf $@ - -echo-revision: - @echo $(REVISION) - diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/package.mk b/rabbitmq-server/plugins-src/rabbitmq-stomp/package.mk deleted file mode 100644 index daacc68..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-stomp/package.mk +++ /dev/null @@ -1,26 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-server rabbitmq-erlang-client rabbitmq-test -STANDALONE_TEST_COMMANDS:=eunit:test([rabbit_stomp_test_util,rabbit_stomp_test_frame],[verbose]) -WITH_BROKER_TEST_SCRIPTS:=$(PACKAGE_DIR)/test/src/test.py $(PACKAGE_DIR)/test/src/test_connect_options.py $(PACKAGE_DIR)/test/src/test_ssl.py -WITH_BROKER_TEST_COMMANDS:=rabbit_stomp_test:all_tests() rabbit_stomp_amqqueue_test:all_tests() -WITH_BROKER_TEST_CONFIG:=$(PACKAGE_DIR)/test/ebin/test - -define package_rules - -$(PACKAGE_DIR)+pre-test:: - rm -rf $(PACKAGE_DIR)/test/certs - mkdir $(PACKAGE_DIR)/test/certs - mkdir -p $(PACKAGE_DIR)/test/ebin - sed -e "s|%%CERTS_DIR%%|$(abspath $(PACKAGE_DIR))/test/certs|g" < $(PACKAGE_DIR)/test/src/test.config > $(PACKAGE_DIR)/test/ebin/test.config - $(MAKE) -C $(PACKAGE_DIR)/../rabbitmq-test/certs all PASSWORD=test DIR=$(abspath $(PACKAGE_DIR))/test/certs - $(MAKE) -C $(PACKAGE_DIR)/deps/stomppy - $(MAKE) -C $(PACKAGE_DIR)/deps/pika - -$(PACKAGE_DIR)+clean:: - rm -rf $(PACKAGE_DIR)/test/certs - -$(PACKAGE_DIR)+clean-with-deps:: - $(MAKE) -C $(PACKAGE_DIR)/deps/stomppy distclean - $(MAKE) -C $(PACKAGE_DIR)/deps/pika distclean - -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_reader.erl b/rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_reader.erl deleted file mode 100644 index 673afee..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-stomp/src/rabbit_stomp_reader.erl 
+++ /dev/null @@ -1,243 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2015 Pivotal Software, Inc. All rights reserved. -%% - --module(rabbit_stomp_reader). - --export([start_link/3]). --export([init/3, mainloop/2]). --export([system_continue/3, system_terminate/4, system_code_change/4]). --export([conserve_resources/3]). - --include("rabbit_stomp.hrl"). --include("rabbit_stomp_frame.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --record(reader_state, {socket, parse_state, processor, state, - conserve_resources, recv_outstanding, - parent}). - -%%---------------------------------------------------------------------------- - -start_link(SupHelperPid, ProcessorPid, Configuration) -> - {ok, proc_lib:spawn_link(?MODULE, init, - [SupHelperPid, ProcessorPid, Configuration])}. - -log(Level, Fmt, Args) -> rabbit_log:log(connection, Level, Fmt, Args). - -init(SupHelperPid, ProcessorPid, Configuration) -> - Reply = go(SupHelperPid, ProcessorPid, Configuration), - rabbit_stomp_processor:flush_and_die(ProcessorPid), - Reply. 
- -go(SupHelperPid, ProcessorPid, Configuration) -> - process_flag(trap_exit, true), - receive - {go, Sock0, SockTransform} -> - case rabbit_net:connection_string(Sock0, inbound) of - {ok, ConnStr} -> - case SockTransform(Sock0) of - {ok, Sock} -> - DebugOpts = sys:debug_options([]), - ProcInitArgs = processor_args(SupHelperPid, - Configuration, - Sock), - rabbit_stomp_processor:init_arg(ProcessorPid, - ProcInitArgs), - log(info, "accepting STOMP connection ~p (~s)~n", - [self(), ConnStr]), - - ParseState = rabbit_stomp_frame:initial_state(), - try - mainloop(DebugOpts, - register_resource_alarm( - #reader_state{socket = Sock, - parse_state = ParseState, - processor = ProcessorPid, - state = running, - conserve_resources = false, - recv_outstanding = false})), - log(info, "closing STOMP connection ~p (~s)~n", - [self(), ConnStr]) - catch _:Ex -> - log_network_error(ConnStr, Ex), - rabbit_net:fast_close(Sock), - rabbit_stomp_processor:flush_and_die(ProcessorPid), - exit(normal) - end, - done; - {error, enotconn} -> - rabbit_net:fast_close(Sock0), - rabbit_stomp_processor:flush_and_die(ProcessorPid), - exit(normal); - {error, Reason} -> - log_network_error(ConnStr, Reason), - rabbit_net:fast_close(Sock0), - rabbit_stomp_processor:flush_and_die(ProcessorPid), - exit(normal) - end - end - end. 
- -mainloop(DebugOpts, State0 = #reader_state{socket = Sock}) -> - State = run_socket(control_throttle(State0)), - receive - {inet_async, Sock, _Ref, {ok, Data}} -> - mainloop(DebugOpts, process_received_bytes( - Data, State#reader_state{recv_outstanding = false})); - {inet_async, _Sock, _Ref, {error, closed}} -> - ok; - {inet_async, _Sock, _Ref, {error, Reason}} -> - throw({inet_error, Reason}); - {inet_reply, _Sock, {error, closed}} -> - ok; - {conserve_resources, Conserve} -> - mainloop(DebugOpts, State#reader_state{conserve_resources = Conserve}); - {bump_credit, Msg} -> - credit_flow:handle_bump_msg(Msg), - mainloop(DebugOpts, State); - {system, From, Request} -> - sys:handle_system_msg(Request, From, State#reader_state.parent, - ?MODULE, DebugOpts, State); - {'EXIT', _From, shutdown} -> - ok; - Other -> - log(warning, "STOMP connection ~p received " - "an unexpected message ~p~n", [Other]), - ok - end. - -process_received_bytes([], State) -> - State; -process_received_bytes(Bytes, - State = #reader_state{ - processor = Processor, - parse_state = ParseState, - state = S}) -> - case rabbit_stomp_frame:parse(Bytes, ParseState) of - {more, ParseState1} -> - State#reader_state{parse_state = ParseState1}; - {ok, Frame, Rest} -> - rabbit_stomp_processor:process_frame(Processor, Frame), - PS = rabbit_stomp_frame:initial_state(), - process_received_bytes(Rest, State#reader_state{ - parse_state = PS, - state = next_state(S, Frame)}) - end. - -conserve_resources(Pid, _Source, Conserve) -> - Pid ! {conserve_resources, Conserve}, - ok. - -register_resource_alarm(State) -> - rabbit_alarm:register(self(), {?MODULE, conserve_resources, []}), State. 
- -control_throttle(State = #reader_state{state = CS, - conserve_resources = Mem}) -> - case {CS, Mem orelse credit_flow:blocked()} of - {running, true} -> State#reader_state{state = blocking}; - {blocking, false} -> State#reader_state{state = running}; - {blocked, false} -> State#reader_state{state = running}; - {_, _} -> State - end. - -next_state(blocking, #stomp_frame{command = "SEND"}) -> - blocked; -next_state(S, _) -> - S. - -run_socket(State = #reader_state{state = blocked}) -> - State; -run_socket(State = #reader_state{recv_outstanding = true}) -> - State; -run_socket(State = #reader_state{socket = Sock}) -> - rabbit_net:async_recv(Sock, 0, infinity), - State#reader_state{recv_outstanding = true}. - -%%---------------------------------------------------------------------------- - -system_continue(Parent, DebugOpts, State) -> - mainloop(DebugOpts, State#reader_state{parent = Parent}). - -system_terminate(Reason, _Parent, _OldVsn, _Extra) -> - exit(Reason). - -system_code_change(Misc, _Module, _OldSvn, _Extra) -> - {ok, Misc}. - -%%---------------------------------------------------------------------------- - -processor_args(SupPid, Configuration, Sock) -> - SendFun = fun (sync, IoData) -> - %% no messages emitted - catch rabbit_net:send(Sock, IoData); - (async, IoData) -> - %% {inet_reply, _, _} will appear soon - %% We ignore certain errors here, as we will be - %% receiving an asynchronous notification of the - %% same (or a related) fault shortly anyway. See - %% bug 21365. - catch rabbit_net:port_command(Sock, IoData) - end, - - StartHeartbeatFun = - fun (SendTimeout, SendFin, ReceiveTimeout, ReceiveFun) -> - rabbit_heartbeat:start(SupPid, Sock, SendTimeout, - SendFin, ReceiveTimeout, ReceiveFun) - end, - {ok, {PeerAddr, _PeerPort}} = rabbit_net:sockname(Sock), - [SendFun, adapter_info(Sock), StartHeartbeatFun, - ssl_login_name(Sock, Configuration), PeerAddr]. - -adapter_info(Sock) -> - amqp_connection:socket_adapter_info(Sock, {'STOMP', 0}). 
- -ssl_login_name(_Sock, #stomp_configuration{ssl_cert_login = false}) -> - none; -ssl_login_name(Sock, #stomp_configuration{ssl_cert_login = true}) -> - case rabbit_net:peercert(Sock) of - {ok, C} -> case rabbit_ssl:peer_cert_auth_name(C) of - unsafe -> none; - not_found -> none; - Name -> Name - end; - {error, no_peercert} -> none; - nossl -> none - end. - -%%---------------------------------------------------------------------------- - -log_network_error(ConnStr, {ssl_upgrade_error, - {tls_alert, "handshake failure"}}) -> - log(error, "STOMP detected TLS upgrade error on " - "~p (~s): handshake failure~n", [self(), ConnStr]); - -log_network_error(ConnStr, {ssl_upgrade_error, - {tls_alert, "unknown ca"}}) -> - log(error, "STOMP detected TLS certificate " - "verification error on " - "~p (~s): alert 'unknown CA'~n", [self(), ConnStr]); - -log_network_error(ConnStr, {ssl_upgrade_error, {tls_alert, Alert}}) -> - log(error, "STOMP detected TLS upgrade error on " - "~p (~s): alert ~s~n", [self(), ConnStr, Alert]); - -log_network_error(ConnStr, {ssl_upgrade_error, closed}) -> - log(error, "STOMP detected TLS upgrade error on " - "~p (~s): connection closed~n", [self(), ConnStr]); - -log_network_error(ConnStr, Ex) -> - log(error, "STOMP detected network error on " - "~p (~s):~n~p~n", [self(), ConnStr, Ex]). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-test/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/Makefile b/rabbitmq-server/plugins-src/rabbitmq-test/Makefile deleted file mode 100644 index 3f11414..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/Makefile +++ /dev/null @@ -1,210 +0,0 @@ -.PHONY: all full lite conformance16 update-qpid-testsuite run-qpid-testsuite \ - prepare restart-app stop-app start-app \ - start-secondary-app stop-secondary-app \ - restart-secondary-node cleanup force-snapshot \ - enable-ha disable-ha - -include ../umbrella.mk - -BROKER_DIR=../rabbitmq-server -TEST_DIR=../rabbitmq-java-client - -TEST_RABBIT_PORT=5672 -TEST_HARE_PORT=5673 -TEST_RABBIT_SSL_PORT=5671 -TEST_HARE_SSL_PORT=5670 - -COVER=true - -ifeq ($(COVER), true) -COVER_START=start-cover -COVER_STOP=stop-cover -else -COVER_START= -COVER_STOP= -endif - -# we actually want to test for ssl above 3.9 (eg >= 3.10), but this -# comparison is buggy because it doesn't believe 10 > 9, so it doesn't -# believe 3.10 > 3.9. As a result, we cheat, and use the erts version -# instead. SSL 3.10 came out with R13B, which included erts 5.7.1, so -# we require > 5.7.0. 
-SSL_VERIFY=$(shell if [ $$(erl -noshell -eval 'io:format(erlang:system_info(version)), halt().') \> "5.7.0" ]; then echo "true"; else echo "false"; fi) -ifeq (true,$(SSL_VERIFY)) -SSL_VERIFY_OPTION :={verify,verify_peer},{fail_if_no_peer_cert,false} -else -SSL_VERIFY_OPTION :={verify_code,1} -endif -export SSL_CERTS_DIR := $(realpath certs) -export PASSWORD := test -RABBIT_BROKER_OPTIONS := "-rabbit ssl_listeners [{\\\"0.0.0.0\\\",$(TEST_RABBIT_SSL_PORT)}] -rabbit ssl_options [{cacertfile,\\\"$(SSL_CERTS_DIR)/testca/cacert.pem\\\"},{certfile,\\\"$(SSL_CERTS_DIR)/server/cert.pem\\\"},{keyfile,\\\"$(SSL_CERTS_DIR)/server/key.pem\\\"},$(SSL_VERIFY_OPTION)] -rabbit auth_mechanisms ['PLAIN','AMQPLAIN','EXTERNAL','RABBIT-CR-DEMO']" -HARE_BROKER_OPTIONS := "-rabbit ssl_listeners [{\\\"0.0.0.0\\\",$(TEST_HARE_SSL_PORT)}] -rabbit ssl_options [{cacertfile,\\\"$(SSL_CERTS_DIR)/testca/cacert.pem\\\"},{certfile,\\\"$(SSL_CERTS_DIR)/server/cert.pem\\\"},{keyfile,\\\"$(SSL_CERTS_DIR)/server/key.pem\\\"},$(SSL_VERIFY_OPTION)] -rabbit auth_mechanisms ['PLAIN','AMQPLAIN','EXTERNAL','RABBIT-CR-DEMO']" - -TESTS_FAILED := echo '\n============'\ - '\nTESTS FAILED'\ - '\n============\n' - -all: full test - -full: - OK=true && \ - $(MAKE) prepare && \ - { $(MAKE) -C $(BROKER_DIR) run-tests || { OK=false; $(TESTS_FAILED); } } && \ - { $(MAKE) run-qpid-testsuite || { OK=false; $(TESTS_FAILED); } } && \ - { ( cd $(TEST_DIR) && MAKE=$(MAKE) ant test-suite ) || { OK=false; $(TESTS_FAILED); } } && \ - $(MAKE) cleanup && { $$OK || $(TESTS_FAILED); } && $$OK - -unit: - OK=true && \ - $(MAKE) prepare && \ - { $(MAKE) -C $(BROKER_DIR) run-tests || OK=false; } && \ - $(MAKE) cleanup && $$OK - -lite: - OK=true && \ - $(MAKE) prepare && \ - { $(MAKE) -C $(BROKER_DIR) run-tests || OK=false; } && \ - { ( cd $(TEST_DIR) && MAKE=$(MAKE) ant test-suite ) || OK=false; } && \ - $(MAKE) cleanup && $$OK - -conformance16: - OK=true && \ - $(MAKE) prepare && \ - { $(MAKE) -C $(BROKER_DIR) run-tests || OK=false; 
} && \ - { ( cd $(TEST_DIR) && MAKE=$(MAKE) ant test-suite ) || OK=false; } && \ - $(MAKE) cleanup && $$OK - -qpid_testsuite: - $(MAKE) update-qpid-testsuite - -update-qpid-testsuite: - svn co -r 906960 http://svn.apache.org/repos/asf/qpid/trunk/qpid/python qpid_testsuite - # hg clone http://rabbit-hg.eng.vmware.com/mirrors/qpid_testsuite - - patch -N -r - -p0 -d qpid_testsuite/ < qpid_patch - -prepare-qpid-patch: - cd qpid_testsuite && svn diff > ../qpid_patch && cd .. - -run-qpid-testsuite: qpid_testsuite - AMQP_SPEC=../rabbitmq-docs/specs/amqp0-8.xml qpid_testsuite/qpid-python-test -m tests_0-8 -I rabbit_failing.txt - AMQP_SPEC=../rabbitmq-docs/specs/amqp0-9-1.xml qpid_testsuite/qpid-python-test -m tests_0-9 -I rabbit_failing.txt - -clean: - rm -rf qpid_testsuite - -prepare: create_ssl_certs - $(MAKE) -C $(BROKER_DIR) \ - RABBITMQ_NODENAME=hare \ - RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \ - RABBITMQ_NODE_PORT=${TEST_HARE_PORT} \ - RABBITMQ_SERVER_START_ARGS=$(HARE_BROKER_OPTIONS) \ - RABBITMQ_CONFIG_FILE=/does-not-exist \ - RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \ - stop-node cleandb start-background-node - $(MAKE) -C $(BROKER_DIR) \ - RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \ - RABBITMQ_NODE_PORT=${TEST_RABBIT_PORT} \ - RABBITMQ_SERVER_START_ARGS=$(RABBIT_BROKER_OPTIONS) \ - RABBITMQ_CONFIG_FILE=/does-not-exist \ - RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \ - stop-node cleandb start-background-node ${COVER_START} start-rabbit-on-node - $(MAKE) -C $(BROKER_DIR) RABBITMQ_NODENAME=hare start-rabbit-on-node - -start-app: - $(MAKE) -C $(BROKER_DIR) \ - RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \ - RABBITMQ_NODE_PORT=${TEST_RABBIT_PORT} \ - RABBITMQ_SERVER_START_ARGS=$(RABBIT_BROKER_OPTIONS) \ - RABBITMQ_CONFIG_FILE=/does-not-exist \ - RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \ - start-rabbit-on-node - -stop-app: - $(MAKE) -C $(BROKER_DIR) stop-rabbit-on-node - -restart-app: stop-app start-app - -start-secondary-app: - $(MAKE) -C $(BROKER_DIR) RABBITMQ_NODENAME=hare 
start-rabbit-on-node - -stop-secondary-app: - $(MAKE) -C $(BROKER_DIR) RABBITMQ_NODENAME=hare stop-rabbit-on-node - -restart-secondary-node: - $(MAKE) -C $(BROKER_DIR) \ - RABBITMQ_NODENAME=hare \ - RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \ - RABBITMQ_NODE_PORT=${TEST_HARE_PORT} \ - RABBITMQ_SERVER_START_ARGS=$(HARE_BROKER_OPTIONS) \ - RABBITMQ_CONFIG_FILE=/does-not-exist \ - RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \ - stop-node start-background-node - $(MAKE) -C $(BROKER_DIR) RABBITMQ_NODENAME=hare start-rabbit-on-node - -force-snapshot: - $(MAKE) -C $(BROKER_DIR) force-snapshot - -set-resource-alarm: - $(MAKE) -C $(BROKER_DIR) set-resource-alarm SOURCE=$(SOURCE) - -clear-resource-alarm: - $(MAKE) -C $(BROKER_DIR) clear-resource-alarm SOURCE=$(SOURCE) - -enable-ha: - $(BROKER_DIR)/scripts/rabbitmqctl set_policy HA \ - ".*" '{"ha-mode": "all"}' - -disable-ha: - $(BROKER_DIR)/scripts/rabbitmqctl clear_policy HA - -cleanup: - -$(MAKE) -C $(BROKER_DIR) \ - RABBITMQ_NODENAME=hare \ - RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \ - RABBITMQ_NODE_PORT=${TEST_HARE_PORT} \ - RABBITMQ_SERVER_START_ARGS=$(HARE_BROKER_OPTIONS) \ - RABBITMQ_CONFIG_FILE=/does-not-exist \ - RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \ - stop-rabbit-on-node stop-node - -$(MAKE) -C $(BROKER_DIR) \ - RABBITMQ_NODE_IP_ADDRESS=0.0.0.0 \ - RABBITMQ_NODE_PORT=${TEST_RABBIT_PORT} \ - RABBITMQ_SERVER_START_ARGS=$(RABBIT_BROKER_OPTIONS) \ - RABBITMQ_CONFIG_FILE=/does-not-exist \ - RABBITMQ_ENABLED_PLUGINS_FILE=/does-not-exist \ - stop-rabbit-on-node ${COVER_STOP} stop-node - -define compare_version -$(shell awk 'BEGIN { - split("$(1)", v1, "\."); - version1 = v1[1] * 1000000 + v1[2] * 10000 + v1[3] * 100 + v1[4]; - - split("$(2)", v2, "\."); - version2 = v2[1] * 1000000 + v2[2] * 10000 + v2[3] * 100 + v2[4]; - - if (version1 $(3) version2) { - print "true"; - } else { - print "false"; - } -}') -endef - -ERLANG_SSL_VER = $(shell erl -noshell -eval '\ - ok = application:load(ssl), \ - {ok, VSN} = 
application:get_key(ssl, vsn), \ - io:format("~s~n", [VSN]), \ - halt(0).') -MINIMUM_ERLANG_SSL_VER = 5.3 - -ifeq ($(call compare_version,$(ERLANG_SSL_VER),$(MINIMUM_ERLANG_SSL_VER),>=),true) -create_ssl_certs: - $(MAKE) -C certs DIR=$(SSL_CERTS_DIR) clean all -else -create_ssl_certs: - @# Skip SSL certs if Erlang is older than R16B01 (ssl 5.3). - $(MAKE) -C certs DIR=$(SSL_CERTS_DIR) clean - @echo "WARNING: Skip SSL certs creation; Erlang's SSL application is too" \ - "old ($(ERLANG_SSL_VER) < $(MINIMUM_ERLANG_SSL_VER)) and SSL support" \ - "is disabled in RabbitMQ" -endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/README b/rabbitmq-server/plugins-src/rabbitmq-test/README deleted file mode 100644 index 9b19505..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/README +++ /dev/null @@ -1,19 +0,0 @@ -Useful targets: - -$ make unit # runs the Erlang unit tests -$ make lite # runs the Erlang unit tests and the Java client / functional tests -$ make full # runs both the above plus the QPid test suite -$ make test # runs the Erlang multi-node integration tests -$ make all # runs all of the above - -The multi-node tests take a long time, so you might want to run a subset: - -$ make test FILTER=dynamic_ha # <- run just one suite -$ make test FILTER=dynamic_ha:change_policy # <- run just one test - -The multi-node tests also default to coverage off, to turn it on: - -$ make test COVER=true - -This repository is not related to plugin tests; run "make test" in a -plugin directory to test that plugin. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/certs/Makefile b/rabbitmq-server/plugins-src/rabbitmq-test/certs/Makefile deleted file mode 100644 index 32db63f..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/certs/Makefile +++ /dev/null @@ -1,58 +0,0 @@ -OPENSSL=openssl - -ifndef DIR -DIR := . -endif - -ifdef PASSWORD -P12PASS := true -else -P12PASS := @echo No PASSWORD defined. 
&& false -endif - -.PRECIOUS: %/testca -.PHONY: %/clean target all p12pass - -all: client server - -client: p12pass - echo $(DIR) - $(MAKE) target DIR=$(DIR) TARGET=client EXTENSIONS=client_ca_extensions - -server: p12pass - $(MAKE) target DIR=$(DIR) TARGET=server EXTENSIONS=server_ca_extensions - -p12pass: - $(P12PASS) - -target: $(DIR)/testca - mkdir $(DIR)/$(TARGET) - { ( cd $(DIR)/$(TARGET) && \ - openssl genrsa -out key.pem 2048 &&\ - openssl req -new -key key.pem -out req.pem -outform PEM\ - -subj /CN=$$(hostname)/O=$(TARGET)/L=$$$$/ -nodes &&\ - cd ../testca && \ - openssl ca -config openssl.cnf -in ../$(TARGET)/req.pem -out \ - ../$(TARGET)/cert.pem -notext -batch -extensions \ - $(EXTENSIONS) && \ - cd ../$(TARGET) && \ - openssl pkcs12 -export -out keycert.p12 -in cert.pem -inkey key.pem \ - -passout pass:$(PASSWORD) ) || (rm -rf $(DIR)/$(TARGET) && false); } - -$(DIR)/testca: - mkdir $(DIR)/testca - cp openssl.cnf $(DIR)/testca/openssl.cnf - { ( cd $(DIR)/testca && \ - mkdir certs private && \ - chmod 700 private && \ - echo 01 > serial && \ - touch index.txt && \ - openssl req -x509 -config openssl.cnf -newkey rsa:2048 -days 365 \ - -out cacert.pem -outform PEM -subj /CN=MyTestCA/L=$$$$/ -nodes && \ - openssl x509 -in cacert.pem -out cacert.cer -outform DER ) \ - || (rm -rf $@ && false); } - -clean: - rm -rf $(DIR)/testca - rm -rf $(DIR)/server - rm -rf $(DIR)/client diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/certs/openssl.cnf b/rabbitmq-server/plugins-src/rabbitmq-test/certs/openssl.cnf deleted file mode 100644 index 93ffb2f..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/certs/openssl.cnf +++ /dev/null @@ -1,54 +0,0 @@ -[ ca ] -default_ca = testca - -[ testca ] -dir = . 
-certificate = $dir/cacert.pem -database = $dir/index.txt -new_certs_dir = $dir/certs -private_key = $dir/private/cakey.pem -serial = $dir/serial - -default_crl_days = 7 -default_days = 365 -default_md = sha1 - -policy = testca_policy -x509_extensions = certificate_extensions - -[ testca_policy ] -commonName = supplied -stateOrProvinceName = optional -countryName = optional -emailAddress = optional -organizationName = optional -organizationalUnitName = optional -domainComponent = optional - -[ certificate_extensions ] -basicConstraints = CA:false - -[ req ] -default_bits = 2048 -default_keyfile = ./private/cakey.pem -default_md = sha1 -prompt = yes -distinguished_name = root_ca_distinguished_name -x509_extensions = root_ca_extensions - -[ root_ca_distinguished_name ] -commonName = hostname - -[ root_ca_extensions ] -basicConstraints = CA:true -keyUsage = keyCertSign, cRLSign - -[ client_ca_extensions ] -basicConstraints = CA:false -keyUsage = digitalSignature -extendedKeyUsage = 1.3.6.1.5.5.7.3.2 - -[ server_ca_extensions ] -basicConstraints = CA:false -keyUsage = keyEncipherment -extendedKeyUsage = 1.3.6.1.5.5.7.3.1 diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/package.mk b/rabbitmq-server/plugins-src/rabbitmq-test/package.mk deleted file mode 100644 index 161d016..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/package.mk +++ /dev/null @@ -1,11 +0,0 @@ -DEPS:=rabbitmq-erlang-client -FILTER:=all -COVER:=false -WITH_BROKER_TEST_COMMANDS:=rabbit_test_runner:run_in_broker(\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\") - -## Require R15B to compile inet_proxy_dist since it requires includes -## introduced there. 
-ifeq ($(shell erl -noshell -eval 'io:format([list_to_integer(X) || X <- string:tokens(erlang:system_info(version), ".")] >= [5,9]), halt().'),true) -STANDALONE_TEST_COMMANDS:=rabbit_test_runner:run_multi(\"$(UMBRELLA_BASE_DIR)/rabbitmq-server\",\"$(PACKAGE_DIR)/test/ebin\",\"$(FILTER)\",$(COVER),none) -PACKAGE_ERLC_OPTS+=-Derlang_r15b_or_later -endif diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/qpid_config.py b/rabbitmq-server/plugins-src/rabbitmq-test/qpid_config.py deleted file mode 100644 index 16388a6..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/qpid_config.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -import os - -AMQP_SPEC_DIR=os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "../rabbitmq-docs/specs") -amqp_spec = os.path.join(AMQP_SPEC_DIR, "amqp.0-10-qpid-errata.xml") -amqp_spec_0_8 = os.path.join(AMQP_SPEC_DIR, "amqp0-8.xml") -amqp_spec_0_9 = os.path.join(AMQP_SPEC_DIR, "amqp0-9.xml") -amqp_spec = 'file://'+os.path.join(AMQP_SPEC_DIR, 'amqp.0-10.xml') diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/qpid_patch b/rabbitmq-server/plugins-src/rabbitmq-test/qpid_patch deleted file mode 100644 index 2c4b590..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/qpid_patch +++ /dev/null @@ -1,142 +0,0 @@ -Index: tests_0-8/basic.py -=================================================================== ---- tests_0-8/basic.py (revision 906960) -+++ tests_0-8/basic.py (working copy) -@@ -98,7 +98,7 @@ - channel.basic_consume(queue="") - self.fail("Expected failure when consuming from unspecified queue") - except Closed, e: -- self.assertConnectionException(530, e.args[0]) -+ self.assertChannelException(404, e.args[0]) - - def test_consume_unique_consumers(self): - """ -Index: tests_0-8/exchange.py -=================================================================== ---- tests_0-8/exchange.py (revision 906960) -+++ tests_0-8/exchange.py (working copy) -@@ -138,8 +138,6 @@ - # Test automatic binding by queue name. 
- self.queue_declare(queue="d") - self.assertPublishConsume(queue="d", routing_key="d") -- # Test explicit bind to default queue -- self.verifyDirectExchange("") - - - # TODO aconway 2006-09-27: Fill in empty tests: -@@ -318,7 +316,7 @@ - self.channel.exchange_declare(exchange="test_different_declared_type_exchange", type="topic") - self.fail("Expected 530 for redeclaration of exchange with different type.") - except Closed, e: -- self.assertConnectionException(530, e.args[0]) -+ self.assertChannelException(406, e.args[0]) - #cleanup - other = self.connect() - c2 = other.channel(1) -Index: tests_0-8/queue.py -=================================================================== ---- tests_0-8/queue.py (revision 906960) -+++ tests_0-8/queue.py (working copy) -@@ -37,14 +37,10 @@ - channel.basic_publish(exchange="test-exchange", routing_key="key", content=Content("two")) - channel.basic_publish(exchange="test-exchange", routing_key="key", content=Content("three")) - -- #check that the queue now reports 3 messages: -- reply = channel.queue_declare(queue="test-queue") -- self.assertEqual(3, reply.message_count) -- - #now do the purge, then test that three messages are purged and the count drops to 0 - reply = channel.queue_purge(queue="test-queue"); - self.assertEqual(3, reply.message_count) -- reply = channel.queue_declare(queue="test-queue") -+ reply = channel.queue_declare(queue="test-queue", exclusive=True) - self.assertEqual(0, reply.message_count) - - #send a further message and consume it, ensuring that the other messages are really gone -@@ -71,7 +67,7 @@ - channel.queue_purge() - self.fail("Expected failure when purging unspecified queue") - except Closed, e: -- self.assertConnectionException(530, e.args[0]) -+ self.assertChannelException(404, e.args[0]) - - #cleanup - other = self.connect() -@@ -174,11 +170,7 @@ - #check attempted deletion of non-existant queue is handled correctly: - channel = self.client.channel(2) - channel.channel_open() -- try: -- 
channel.queue_delete(queue="i-dont-exist", if_empty="True") -- self.fail("Expected delete of non-existant queue to fail") -- except Closed, e: -- self.assertChannelException(404, e.args[0]) -+ channel.queue_delete(queue="i-dont-exist", if_empty="True") - - - -Index: qpid/codec.py -=================================================================== ---- qpid/codec.py (revision 906960) -+++ qpid/codec.py (working copy) -@@ -76,6 +76,7 @@ - if not self.types: - self.typecode(ord('S'), "longstr") - self.typecode(ord('I'), "long") -+ self.typecode(ord('t'), "bool") - - def typecode(self, code, type): - self.types[code] = type -@@ -206,6 +207,22 @@ - """ - return self.unpack("!B") - -+ def encode_bool(self, b): -+ """ -+ encodes bool (8 bits) data 't' in network byte order -+ """ -+ -+ if ((b is not True) and (b is not False)): -+ raise ValueError('Valid range of bool is True or False') -+ -+ self.pack("!B", int(b)) -+ -+ def decode_bool(self): -+ """ -+ decodes a bool (8 bits) encoded in network byte order -+ """ -+ return bool(self.unpack("!B")) -+ - def encode_short(self, o): - """ - encodes short (16 bits) data 'o' in network byte order -Index: qpid/testlib.py -=================================================================== ---- qpid/testlib.py (revision 906960) -+++ qpid/testlib.py (working copy) -@@ -67,8 +67,7 @@ - - if not self.client.closed: - self.client.channel(0).connection_close(reply_code=200) -- else: -- self.client.close() -+ self.client.close() - - def connect(self, host=None, port=None, user=None, password=None, tune_params=None): - """Create a new connction, return the Client object""" -Index: qpid_config.py -=================================================================== ---- qpid_config.py (revision 906960) -+++ qpid_config.py (working copy) -@@ -19,7 +19,8 @@ - - import os - --AMQP_SPEC_DIR=os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "specs") 
-+AMQP_SPEC_DIR=os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "../rabbitmq-docs/specs") - amqp_spec = os.path.join(AMQP_SPEC_DIR, "amqp.0-10-qpid-errata.xml") --amqp_spec_0_8 = os.path.join(AMQP_SPEC_DIR, "amqp.0-8.xml") --amqp_spec_0_9 = os.path.join(AMQP_SPEC_DIR, "amqp.0-9.xml") -+amqp_spec_0_8 = os.path.join(AMQP_SPEC_DIR, "amqp0-8.xml") -+amqp_spec_0_9 = os.path.join(AMQP_SPEC_DIR, "amqp0-9.xml") -+amqp_spec = 'file://'+os.path.join(AMQP_SPEC_DIR, 'amqp.0-10.xml') diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/rabbit_failing.txt b/rabbitmq-server/plugins-src/rabbitmq-test/rabbit_failing.txt deleted file mode 100644 index be4eccf..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/rabbit_failing.txt +++ /dev/null @@ -1,9 +0,0 @@ -tests_0-8.basic.BasicTests.test_ack -tests_0-8.basic.BasicTests.test_consume_no_local -tests_0-8.basic.BasicTests.test_qos_prefetch_count -tests_0-8.basic.BasicTests.test_qos_prefetch_size -tests_0-8.broker.BrokerTests.test_basic_delivery_immediate -tests_0-8.broker.BrokerTests.test_channel_flow -tests_0-8.tx.TxTests.test_auto_rollback -tests_0-8.tx.TxTests.test_rollback -tests_0-9.query.* diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_proxy_dist.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_proxy_dist.erl deleted file mode 100644 index 847ef2e..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_proxy_dist.erl +++ /dev/null @@ -1,199 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. 
-%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(inet_proxy_dist). - -%% A distribution plugin that uses the usual inet_tcp_dist but allows -%% insertion of a proxy at the receiving end. - -%% inet_*_dist "behaviour" --export([listen/1, accept/1, accept_connection/5, - setup/5, close/1, select/1, is_node_name/1]). - -%% For copypasta from inet_tcp_dist --export([do_setup/6]). --import(error_logger,[error_msg/2]). - --define(REAL, inet_tcp_dist). - -%%---------------------------------------------------------------------------- - -listen(Name) -> ?REAL:listen(Name). -select(Node) -> ?REAL:select(Node). -accept(Listen) -> ?REAL:accept(Listen). -close(Socket) -> ?REAL:close(Socket). -is_node_name(Node) -> ?REAL:is_node_name(Node). - -accept_connection(AcceptPid, Socket, MyNode, Allowed, SetupTime) -> - ?REAL:accept_connection(AcceptPid, Socket, MyNode, Allowed, SetupTime). - -%% This is copied from inet_tcp_dist, in order to change the -%% output of erl_epmd:port_please/2. - --ifdef(erlang_r15b_or_later). - --include_lib("kernel/include/net_address.hrl"). --include_lib("kernel/include/dist_util.hrl"). - -setup(Node, Type, MyNode, LongOrShortNames,SetupTime) -> - spawn_opt(?MODULE, do_setup, - [self(), Node, Type, MyNode, LongOrShortNames, SetupTime], - [link, {priority, max}]). 
- -do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) -> - ?trace("~p~n",[{inet_tcp_dist,self(),setup,Node}]), - [Name, Address] = splitnode(Node, LongOrShortNames), - case inet:getaddr(Address, inet) of - {ok, Ip} -> - Timer = dist_util:start_timer(SetupTime), - case erl_epmd:port_please(Name, Ip) of - {port, TcpPort, Version} -> - ?trace("port_please(~p) -> version ~p~n", - [Node,Version]), - dist_util:reset_timer(Timer), - %% Modification START - ProxyPort = case TcpPort >= 25672 andalso TcpPort < 25700 - andalso inet_tcp_proxy:is_enabled() of - true -> TcpPort + 5000; - false -> TcpPort - end, - case inet_tcp:connect(Ip, ProxyPort, - [{active, false}, - {packet,2}]) of - {ok, Socket} -> - {ok, {_, SrcPort}} = inet:sockname(Socket), - ok = inet_tcp_proxy_manager:register( - node(), Node, SrcPort, TcpPort, ProxyPort), - %% Modification END - HSData = #hs_data{ - kernel_pid = Kernel, - other_node = Node, - this_node = MyNode, - socket = Socket, - timer = Timer, - this_flags = 0, - other_version = Version, - f_send = fun inet_tcp:send/2, - f_recv = fun inet_tcp:recv/3, - f_setopts_pre_nodeup = - fun(S) -> - inet:setopts - (S, - [{active, false}, - {packet, 4}, - nodelay()]) - end, - f_setopts_post_nodeup = - fun(S) -> - inet:setopts - (S, - [{active, true}, - {deliver, port}, - {packet, 4}, - nodelay()]) - end, - f_getll = fun inet:getll/1, - f_address = - fun(_,_) -> - #net_address{ - address = {Ip,TcpPort}, - host = Address, - protocol = tcp, - family = inet} - end, - mf_tick = fun inet_tcp_dist:tick/1, - mf_getstat = fun inet_tcp_dist:getstat/1, - request_type = Type - }, - dist_util:handshake_we_started(HSData); - R -> - io:format("~p failed! ~p~n", [node(), R]), - %% Other Node may have closed since - %% port_please ! 
- ?trace("other node (~p) " - "closed since port_please.~n", - [Node]), - ?shutdown(Node) - end; - _ -> - ?trace("port_please (~p) " - "failed.~n", [Node]), - ?shutdown(Node) - end; - _Other -> - ?trace("inet_getaddr(~p) " - "failed (~p).~n", [Node,_Other]), - ?shutdown(Node) - end. - -%% If Node is illegal terminate the connection setup!! -splitnode(Node, LongOrShortNames) -> - case split_node(atom_to_list(Node), $@, []) of - [Name|Tail] when Tail =/= [] -> - Host = lists:append(Tail), - case split_node(Host, $., []) of - [_] when LongOrShortNames =:= longnames -> - error_msg("** System running to use " - "fully qualified " - "hostnames **~n" - "** Hostname ~s is illegal **~n", - [Host]), - ?shutdown(Node); - L when length(L) > 1, LongOrShortNames =:= shortnames -> - error_msg("** System NOT running to use fully qualified " - "hostnames **~n" - "** Hostname ~s is illegal **~n", - [Host]), - ?shutdown(Node); - _ -> - [Name, Host] - end; - [_] -> - error_msg("** Nodename ~p illegal, no '@' character **~n", - [Node]), - ?shutdown(Node); - _ -> - error_msg("** Nodename ~p illegal **~n", [Node]), - ?shutdown(Node) - end. - -split_node([Chr|T], Chr, Ack) -> [lists:reverse(Ack)|split_node(T, Chr, [])]; -split_node([H|T], Chr, Ack) -> split_node(T, Chr, [H|Ack]); -split_node([], _, Ack) -> [lists:reverse(Ack)]. - -%% we may not always want the nodelay behaviour -%% for performance reasons - -nodelay() -> - case application:get_env(kernel, dist_nodelay) of - undefined -> - {nodelay, true}; - {ok, true} -> - {nodelay, true}; - {ok, false} -> - {nodelay, false}; - _ -> - {nodelay, true} - end. - --else. - -setup(_Node, _Type, _MyNode, _LongOrShortNames, _SetupTime) -> - exit(erlang_r15b_required). - -do_setup(_Kernel, _Node, _Type, _MyNode, _LongOrShortNames, _SetupTime) -> - exit(erlang_r15b_required). - --endif. 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy.erl deleted file mode 100644 index 28d58e0..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy.erl +++ /dev/null @@ -1,106 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(inet_tcp_proxy). - -%% A TCP proxy for insertion into the Erlang distribution mechanism, -%% which allows us to simulate network partitions. - --export([start/0, reconnect/1, is_enabled/0, allow/1, block/1]). - --define(TABLE, ?MODULE). - -%% This can't start_link because there's no supervision hierarchy we -%% can easily fit it into (we need to survive all application -%% restarts). So we have to do some horrible error handling. - -start() -> - spawn(error_handler(fun go/0)), - ok. - -reconnect(Nodes) -> - [erlang:disconnect_node(N) || N <- Nodes, N =/= node()], - ok. - -is_enabled() -> - lists:member(?TABLE, ets:all()). - -allow(Node) -> ets:delete(?TABLE, Node). -block(Node) -> ets:insert(?TABLE, {Node, block}). - -%%---------------------------------------------------------------------------- - -error_handler(Thunk) -> - fun () -> - try - Thunk() - catch _:{{nodedown, _}, _} -> - %% The only other node we ever talk to is the test - %% runner; if that's down then the test is nearly - %% over; die quietly. 
- ok; - _:X -> - io:format(user, "TCP proxy died with ~p~n At ~p~n", - [X, erlang:get_stacktrace()]), - erlang:halt(1) - end - end. - -go() -> - ets:new(?TABLE, [public, named_table]), - {ok, Port} = application:get_env(kernel, inet_dist_listen_min), - ProxyPort = Port + 5000, - {ok, Sock} = gen_tcp:listen(ProxyPort, [inet, - {reuseaddr, true}]), - accept_loop(Sock, Port). - -accept_loop(ListenSock, Port) -> - {ok, Sock} = gen_tcp:accept(ListenSock), - Proxy = spawn(error_handler(fun() -> run_it(Sock, Port) end)), - ok = gen_tcp:controlling_process(Sock, Proxy), - accept_loop(ListenSock, Port). - -run_it(SockIn, Port) -> - case {inet:peername(SockIn), inet:sockname(SockIn)} of - {{ok, {_Addr, SrcPort}}, {ok, {Addr, _OtherPort}}} -> - {ok, Remote, This} = inet_tcp_proxy_manager:lookup(SrcPort), - case node() of - This -> ok; - _ -> exit({not_me, node(), This}) - end, - {ok, SockOut} = gen_tcp:connect(Addr, Port, [inet]), - run_loop({SockIn, SockOut}, Remote, []); - _ -> - ok - end. - -run_loop(Sockets, RemoteNode, Buf0) -> - Block = [{RemoteNode, block}] =:= ets:lookup(?TABLE, RemoteNode), - receive - {tcp, Sock, Data} -> - Buf = [Data | Buf0], - case Block of - false -> gen_tcp:send(other(Sock, Sockets), lists:reverse(Buf)), - run_loop(Sockets, RemoteNode, []); - true -> run_loop(Sockets, RemoteNode, Buf) - end; - {tcp_closed, Sock} -> - gen_tcp:close(other(Sock, Sockets)); - X -> - exit({weirdness, X}) - end. - -other(A, {A, B}) -> B; -other(B, {A, B}) -> A. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy_manager.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy_manager.erl deleted file mode 100644 index a79ea9f..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/inet_tcp_proxy_manager.erl +++ /dev/null @@ -1,107 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. 
You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(inet_tcp_proxy_manager). - -%% The TCP proxies need to decide whether to block based on the node -%% they're running on, and the node connecting to them. The trouble -%% is, they don't have an easy way to determine the latter. Therefore -%% when A connects to B we register the source port used by A here, so -%% that B can later look it up and find out who A is without having to -%% sniff the distribution protocol. -%% -%% That does unfortunately mean that we need a central control -%% thing. We assume here it's running on the node called -%% 'standalone_test' since that's where tests are orchestrated from. -%% -%% Yes, this leaks. For its intended lifecycle, that's fine. - --behaviour(gen_server). - --export([start_link/0, register/5, lookup/1]). - --export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, - code_change/3]). - --define(NODE, standalone_test). - --record(state, {ports, pending}). - -start_link() -> - Node = node(), - Node = controller_node(), - gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). - -register(_From, _To, _SrcPort, Port, Port) -> - %% No proxy, don't register - ok; -register(From, To, SrcPort, _Port, _ProxyPort) -> - gen_server:call(name(), {register, From, To, SrcPort}, infinity). - -lookup(SrcPort) -> - gen_server:call(name(), {lookup, SrcPort}, infinity). - -controller_node() -> - rabbit_nodes:make(atom_to_list(?NODE)). - -name() -> - {?MODULE, controller_node()}. 
- -%%---------------------------------------------------------------------------- - -init([]) -> - net_kernel:monitor_nodes(true), - {ok, #state{ports = dict:new(), - pending = []}}. - -handle_call({register, FromNode, ToNode, SrcPort}, _From, - State = #state{ports = Ports, - pending = Pending}) -> - {Notify, Pending2} = - lists:partition(fun ({P, _}) -> P =:= SrcPort end, Pending), - [gen_server:reply(From, {ok, FromNode, ToNode}) || {_, From} <- Notify], - {reply, ok, - State#state{ports = dict:store(SrcPort, {FromNode, ToNode}, Ports), - pending = Pending2}}; - -handle_call({lookup, SrcPort}, From, - State = #state{ports = Ports, pending = Pending}) -> - case dict:find(SrcPort, Ports) of - {ok, {FromNode, ToNode}} -> - {reply, {ok, FromNode, ToNode}, State}; - error -> - {noreply, State#state{pending = [{SrcPort, From} | Pending]}} - end; - -handle_call(_Req, _From, State) -> - {reply, unknown_request, State}. - -handle_cast(_C, State) -> - {noreply, State}. - -handle_info({nodedown, Node}, State = #state{ports = Ports}) -> - Ports1 = dict:filter( - fun (_, {From, To}) -> - Node =/= From andalso Node =/= To - end, Ports), - {noreply, State#state{ports = Ports1}}; - -handle_info(_I, State) -> - {noreply, State}. - -terminate(_Reason, _State) -> - ok. - -code_change(_, State, _) -> {ok, State}. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_consumer.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_consumer.erl deleted file mode 100644 index f11d8d4..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_consumer.erl +++ /dev/null @@ -1,114 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. 
You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(rabbit_ha_test_consumer). - --include_lib("amqp_client/include/amqp_client.hrl"). - --export([await_response/1, create/5, start/6]). - -await_response(ConsumerPid) -> - case receive {ConsumerPid, Response} -> Response end of - {error, Reason} -> erlang:error(Reason); - ok -> ok - end. - -create(Channel, Queue, TestPid, CancelOnFailover, ExpectingMsgs) -> - ConsumerPid = spawn_link(?MODULE, start, - [TestPid, Channel, Queue, CancelOnFailover, - ExpectingMsgs + 1, ExpectingMsgs]), - amqp_channel:subscribe( - Channel, consume_method(Queue, CancelOnFailover), ConsumerPid), - ConsumerPid. - -start(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume) -> - error_logger:info_msg("consumer ~p on ~p awaiting ~w messages " - "(lowest seen = ~w, cancel-on-failover = ~w)~n", - [self(), Channel, MsgsToConsume, LowestSeen, - CancelOnFailover]), - run(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume). 
- -run(TestPid, _Channel, _Queue, _CancelOnFailover, _LowestSeen, 0) -> - consumer_reply(TestPid, ok); -run(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume) -> - receive - #'basic.consume_ok'{} -> - run(TestPid, Channel, Queue, - CancelOnFailover, LowestSeen, MsgsToConsume); - {Delivery = #'basic.deliver'{ redelivered = Redelivered }, - #amqp_msg{payload = Payload}} -> - MsgNum = list_to_integer(binary_to_list(Payload)), - - ack(Delivery, Channel), - - %% we can receive any message we've already seen and, - %% because of the possibility of multiple requeuings, we - %% might see these messages in any order. If we are seeing - %% a message again, we don't decrement the MsgsToConsume - %% counter. - if - MsgNum + 1 == LowestSeen -> - run(TestPid, Channel, Queue, - CancelOnFailover, MsgNum, MsgsToConsume - 1); - MsgNum >= LowestSeen -> - error_logger:info_msg( - "consumer ~p on ~p ignoring redeliverd msg ~p~n", - [self(), Channel, MsgNum]), - true = Redelivered, %% ASSERTION - run(TestPid, Channel, Queue, - CancelOnFailover, LowestSeen, MsgsToConsume); - true -> - %% We received a message we haven't seen before, - %% but it is not the next message in the expected - %% sequence. - consumer_reply(TestPid, - {error, {unexpected_message, MsgNum}}) - end; - #'basic.cancel'{} when CancelOnFailover -> - error_logger:info_msg("consumer ~p on ~p received basic.cancel: " - "resubscribing to ~p on ~p~n", - [self(), Channel, Queue, Channel]), - resubscribe(TestPid, Channel, Queue, CancelOnFailover, - LowestSeen, MsgsToConsume); - #'basic.cancel'{} -> - exit(cancel_received_without_cancel_on_failover) - end. 
- -%% -%% Private API -%% - -resubscribe(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, - MsgsToConsume) -> - amqp_channel:subscribe( - Channel, consume_method(Queue, CancelOnFailover), self()), - ok = receive #'basic.consume_ok'{} -> ok - end, - error_logger:info_msg("re-subscripting consumer ~p on ~p complete " - "(received basic.consume_ok)", - [self(), Channel]), - start(TestPid, Channel, Queue, CancelOnFailover, LowestSeen, MsgsToConsume). - -consume_method(Queue, CancelOnFailover) -> - Args = [{<<"x-cancel-on-ha-failover">>, bool, CancelOnFailover}], - #'basic.consume'{queue = Queue, - arguments = Args}. - -ack(#'basic.deliver'{delivery_tag = DeliveryTag}, Channel) -> - amqp_channel:call(Channel, #'basic.ack'{delivery_tag = DeliveryTag}), - ok. - -consumer_reply(TestPid, Reply) -> - TestPid ! {self(), Reply}. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_producer.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_producer.erl deleted file mode 100644 index f3070fe..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_ha_test_producer.erl +++ /dev/null @@ -1,119 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(rabbit_ha_test_producer). - --export([await_response/1, start/5, create/5]). - --include_lib("amqp_client/include/amqp_client.hrl"). 
- -await_response(ProducerPid) -> - error_logger:info_msg("waiting for producer pid ~p~n", [ProducerPid]), - case receive {ProducerPid, Response} -> Response end of - ok -> ok; - {error, _} = Else -> exit(Else); - Else -> exit({weird_response, Else}) - end. - -create(Channel, Queue, TestPid, Confirm, MsgsToSend) -> - ProducerPid = spawn_link(?MODULE, start, [Channel, Queue, TestPid, - Confirm, MsgsToSend]), - receive - {ProducerPid, started} -> ProducerPid - end. - -start(Channel, Queue, TestPid, Confirm, MsgsToSend) -> - ConfirmState = - case Confirm of - true -> amqp_channel:register_confirm_handler(Channel, self()), - #'confirm.select_ok'{} = - amqp_channel:call(Channel, #'confirm.select'{}), - gb_trees:empty(); - false -> none - end, - TestPid ! {self(), started}, - error_logger:info_msg("publishing ~w msgs on ~p~n", [MsgsToSend, Channel]), - producer(Channel, Queue, TestPid, ConfirmState, MsgsToSend). - -%% -%% Private API -%% - -producer(_Channel, _Queue, TestPid, none, 0) -> - TestPid ! {self(), ok}; -producer(Channel, _Queue, TestPid, ConfirmState, 0) -> - error_logger:info_msg("awaiting confirms on channel ~p~n", [Channel]), - Msg = case drain_confirms(no_nacks, ConfirmState) of - no_nacks -> ok; - nacks -> {error, received_nacks}; - {Nacks, CS} -> {error, {missing_confirms, Nacks, - lists:sort(gb_trees:keys(CS))}} - end, - TestPid ! {self(), Msg}; - -producer(Channel, Queue, TestPid, ConfirmState, MsgsToSend) -> - Method = #'basic.publish'{exchange = <<"">>, - routing_key = Queue, - mandatory = false, - immediate = false}, - - ConfirmState1 = maybe_record_confirm(ConfirmState, Channel, MsgsToSend), - - amqp_channel:call(Channel, Method, - #amqp_msg{props = #'P_basic'{delivery_mode = 2}, - payload = list_to_binary( - integer_to_list(MsgsToSend))}), - - producer(Channel, Queue, TestPid, ConfirmState1, MsgsToSend - 1). 
- -maybe_record_confirm(none, _, _) -> - none; -maybe_record_confirm(ConfirmState, Channel, MsgsToSend) -> - SeqNo = amqp_channel:next_publish_seqno(Channel), - gb_trees:insert(SeqNo, MsgsToSend, ConfirmState). - -drain_confirms(Nacks, ConfirmState) -> - case gb_trees:is_empty(ConfirmState) of - true -> Nacks; - false -> receive - #'basic.ack'{delivery_tag = DeliveryTag, - multiple = IsMulti} -> - drain_confirms(Nacks, - delete_confirms(DeliveryTag, IsMulti, - ConfirmState)); - #'basic.nack'{delivery_tag = DeliveryTag, - multiple = IsMulti} -> - drain_confirms(nacks, - delete_confirms(DeliveryTag, IsMulti, - ConfirmState)) - after - 60000 -> {Nacks, ConfirmState} - end - end. - -delete_confirms(DeliveryTag, false, ConfirmState) -> - gb_trees:delete(DeliveryTag, ConfirmState); -delete_confirms(DeliveryTag, true, ConfirmState) -> - multi_confirm(DeliveryTag, ConfirmState). - -multi_confirm(DeliveryTag, ConfirmState) -> - case gb_trees:is_empty(ConfirmState) of - true -> ConfirmState; - false -> {Key, _, ConfirmState1} = gb_trees:take_smallest(ConfirmState), - case Key =< DeliveryTag of - true -> multi_confirm(DeliveryTag, ConfirmState1); - false -> ConfirmState - end - end. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_configs.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_configs.erl deleted file mode 100644 index f286733..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_configs.erl +++ /dev/null @@ -1,279 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. 
-%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(rabbit_test_configs). - --include_lib("amqp_client/include/amqp_client.hrl"). - --export([enable_plugins/1]). --export([cluster/2, cluster_ab/1, cluster_abc/1, start_ab/1, start_abc/1]). --export([start_connections/1, build_cluster/1]). --export([ha_policy_all/1, ha_policy_two_pos/1]). --export([start_nodes/2, start_nodes/3, add_to_cluster/2, - rabbitmqctl/2, rabbitmqctl_fail/2]). --export([stop_nodes/1, start_node/1, stop_node/1, kill_node/1, restart_node/1, - start_node_fail/1, execute/1]). --export([cover_work_factor/2]). - --import(rabbit_test_util, [set_ha_policy/3, set_ha_policy/4, a2b/1]). --import(rabbit_misc, [pget/2, pget/3]). - --define(INITIAL_KEYS, [cover, base, server, plugins]). --define(NON_RUNNING_KEYS, ?INITIAL_KEYS ++ [nodename, port, mnesia_dir]). - -cluster_ab(InitialCfg) -> cluster(InitialCfg, [a, b]). -cluster_abc(InitialCfg) -> cluster(InitialCfg, [a, b, c]). -start_ab(InitialCfg) -> start_nodes(InitialCfg, [a, b]). -start_abc(InitialCfg) -> start_nodes(InitialCfg, [a, b, c]). - -cluster(InitialCfg, NodeNames) -> - start_connections(build_cluster(start_nodes(InitialCfg, NodeNames))). - -start_nodes(InitialCfg, NodeNames) -> - start_nodes(InitialCfg, NodeNames, 5672). 
- -start_nodes(InitialCfg0, NodeNames, FirstPort) -> - {ok, Already0} = net_adm:names(), - Already = [list_to_atom(N) || {N, _P} <- Already0], - [check_node_not_running(Node, Already) || Node <- NodeNames], - Ports = lists:seq(FirstPort, length(NodeNames) + FirstPort - 1), - InitialCfgs = case InitialCfg0 of - [{_, _}|_] -> [InitialCfg0 || _ <- NodeNames]; - _ -> InitialCfg0 - end, - Nodes = [[{nodename, N}, {port, P}, - {mnesia_dir, rabbit_misc:format("rabbitmq-~s-mnesia", [N])} | - strip_non_initial(Cfg)] - || {N, P, Cfg} <- lists:zip3(NodeNames, Ports, InitialCfgs)], - [start_node(Node) || Node <- Nodes]. - -check_node_not_running(Node, Already) -> - case lists:member(Node, Already) of - true -> exit({node_already_running, Node}); - false -> ok - end. - -strip_non_initial(Cfg) -> - [{K, V} || {K, V} <- Cfg, lists:member(K, ?INITIAL_KEYS)]. - -strip_running(Cfg) -> - [{K, V} || {K, V} <- Cfg, lists:member(K, ?NON_RUNNING_KEYS)]. - -enable_plugins(Cfg) -> - enable_plugins(pget(plugins, Cfg), pget(server, Cfg), Cfg). - -enable_plugins(none, _Server, _Cfg) -> ok; -enable_plugins(_Dir, Server, Cfg) -> - R = execute(Cfg, Server ++ "/scripts/rabbitmq-plugins list -m"), - Plugins = string:join(string:tokens(R, "\n"), " "), - execute(Cfg, {Server ++ "/scripts/rabbitmq-plugins set --offline ~s", - [Plugins]}), - ok. - -start_node(Cfg0) -> - Node = rabbit_nodes:make(pget(nodename, Cfg0)), - Cfg = [{node, Node} | Cfg0], - Server = pget(server, Cfg), - Linked = execute_bg(Cfg, Server ++ "/scripts/rabbitmq-server"), - rabbitmqctl(Cfg, {"wait ~s", [pid_file(Cfg)]}), - OSPid = rpc:call(Node, os, getpid, []), - %% The cover system thinks all nodes with the same name are the - %% same node and will automaticaly re-establish cover as soon as - %% we see them, so we only want to start cover once per node name - %% for the entire test run. 
- case {pget(cover, Cfg), lists:member(Node, cover:which_nodes())} of - {true, false} -> cover:start([Node]); - _ -> ok - end, - [{os_pid, OSPid}, - {linked_pid, Linked} | Cfg]. - -start_node_fail(Cfg0) -> - Node = rabbit_nodes:make(pget(nodename, Cfg0)), - Cfg = [{node, Node}, {acceptable_exit_codes, lists:seq(1, 255)} | Cfg0], - Server = pget(server, Cfg), - execute(Cfg, Server ++ "/scripts/rabbitmq-server"), - ok. - -build_cluster([First | Rest]) -> - add_to_cluster([First], Rest). - -add_to_cluster([First | _] = Existing, New) -> - [cluster_with(First, Node) || Node <- New], - Existing ++ New. - -cluster_with(Cfg, NewCfg) -> - Node = pget(node, Cfg), - rabbitmqctl(NewCfg, stop_app), - rabbitmqctl(NewCfg, {"join_cluster ~s", [Node]}), - rabbitmqctl(NewCfg, start_app). - -rabbitmqctl(Cfg, Str) -> - Node = pget(node, Cfg), - Server = pget(server, Cfg), - Cmd = case Node of - undefined -> {"~s", [fmt(Str)]}; - _ -> {"-n ~s ~s", [Node, fmt(Str)]} - end, - execute(Cfg, {Server ++ "/scripts/rabbitmqctl ~s", [fmt(Cmd)]}). - -rabbitmqctl_fail(Cfg, Str) -> - rabbitmqctl([{acceptable_exit_codes, lists:seq(1, 255)} | Cfg], Str). - -ha_policy_all([Cfg | _] = Cfgs) -> - set_ha_policy(Cfg, <<".*">>, <<"all">>), - Cfgs. - -ha_policy_two_pos([Cfg | _] = Cfgs) -> - Members = [a2b(pget(node, C)) || C <- Cfgs], - TwoNodes = [M || M <- lists:sublist(Members, 2)], - set_ha_policy(Cfg, <<"^ha.two.">>, {<<"nodes">>, TwoNodes}, - [{<<"ha-promote-on-shutdown">>, <<"always">>}]), - set_ha_policy(Cfg, <<"^ha.auto.">>, {<<"nodes">>, TwoNodes}, - [{<<"ha-sync-mode">>, <<"automatic">>}, - {<<"ha-promote-on-shutdown">>, <<"always">>}]), - Cfgs. - -start_connections(Nodes) -> [start_connection(Node) || Node <- Nodes]. - -start_connection(Cfg) -> - Port = pget(port, Cfg), - {ok, Conn} = amqp_connection:start(#amqp_params_network{port = Port}), - {ok, Ch} = amqp_connection:open_channel(Conn), - [{connection, Conn}, {channel, Ch} | Cfg]. - -stop_nodes(Nodes) -> [stop_node(Node) || Node <- Nodes]. 
- -stop_node(Cfg) -> - maybe_flush_cover(Cfg), - catch rabbitmqctl(Cfg, {"stop ~s", [pid_file(Cfg)]}), - strip_running(Cfg). - -kill_node(Cfg) -> - maybe_flush_cover(Cfg), - OSPid = pget(os_pid, Cfg), - catch execute(Cfg, {"kill -9 ~s", [OSPid]}), - await_os_pid_death(OSPid), - strip_running(Cfg). - -await_os_pid_death(OSPid) -> - case rabbit_misc:is_os_process_alive(OSPid) of - true -> timer:sleep(100), - await_os_pid_death(OSPid); - false -> ok - end. - -restart_node(Cfg) -> - start_node(stop_node(Cfg)). - -maybe_flush_cover(Cfg) -> - case pget(cover, Cfg) of - true -> cover:flush(pget(node, Cfg)); - false -> ok - end. - -%% Cover slows things down enough that if we are sending messages in -%% bulk, we want to send fewer or we'll be here all day... -cover_work_factor(Without, Cfg) -> - case pget(cover, Cfg) of - true -> trunc(Without * 0.1); - false -> Without - end. - -%%---------------------------------------------------------------------------- - -execute(Cmd) -> - execute([], Cmd, [0]). - -execute(Cfg, Cmd) -> - %% code 137 -> killed with SIGKILL which we do in some tests - execute(environment(Cfg), Cmd, pget(acceptable_exit_codes, Cfg, [0, 137])). - -execute(Env0, Cmd0, AcceptableExitCodes) -> - Env = [{"RABBITMQ_" ++ K, fmt(V)} || {K, V} <- Env0], - Cmd = fmt(Cmd0), - error_logger:info_msg("Invoking '~s'~n", [Cmd]), - Port = erlang:open_port( - {spawn, "/usr/bin/env sh -c \"" ++ Cmd ++ "\""}, - [{env, Env}, exit_status, - stderr_to_stdout, use_stdio]), - port_receive_loop(Port, "", AcceptableExitCodes). 
- -environment(Cfg) -> - Nodename = pget(nodename, Cfg), - Plugins = pget(plugins, Cfg), - case Nodename of - undefined -> - plugins_env(Plugins); - _ -> - Port = pget(port, Cfg), - Base = pget(base, Cfg), - Server = pget(server, Cfg), - [{"MNESIA_DIR", {"~s/~s", [Base, pget(mnesia_dir, Cfg)]}}, - {"PLUGINS_EXPAND_DIR", {"~s/~s-plugins-expand", [Base, Nodename]}}, - {"LOG_BASE", {"~s", [Base]}}, - {"NODENAME", {"~s", [Nodename]}}, - {"NODE_PORT", {"~B", [Port]}}, - {"PID_FILE", pid_file(Cfg)}, - {"CONFIG_FILE", "/some/path/which/does/not/exist"}, - {"ALLOW_INPUT", "1"}, %% Needed to make it close on exit - %% Bit of a hack - only needed for mgmt tests. - {"SERVER_START_ARGS", - {"-rabbitmq_management listener [{port,1~B}]", [Port]}}, - {"SERVER_ERL_ARGS", - %% Next two lines are defaults - {"+K true +A30 +P 1048576 " - "-kernel inet_default_connect_options [{nodelay,true}] " - %% Some tests need to be able to make distribution unhappy - "-pa ~s/../rabbitmq-test/ebin " - "-proto_dist inet_proxy", [Server]}} - | plugins_env(Plugins)] - end. - -plugins_env(none) -> - [{"ENABLED_PLUGINS_FILE", "/does-not-exist"}]; -plugins_env(Dir) -> - [{"PLUGINS_DIR", {"~s/plugins", [Dir]}}, - {"PLUGINS_EXPAND_DIR", {"~s/expand", [Dir]}}, - {"ENABLED_PLUGINS_FILE", {"~s/enabled_plugins", [Dir]}}]. - -pid_file(Cfg) -> - rabbit_misc:format("~s/~s.pid", [pget(base, Cfg), pget(nodename, Cfg)]). - -port_receive_loop(Port, Stdout, AcceptableExitCodes) -> - receive - {Port, {exit_status, X}} -> - Fmt = "Command exited with code ~p~nStdout: ~s~n", - Args = [X, Stdout], - case lists:member(X, AcceptableExitCodes) of - true -> error_logger:info_msg(Fmt, Args), - Stdout; - false -> error_logger:error_msg(Fmt, Args), - exit({exit_status, X, AcceptableExitCodes, Stdout}) - end; - {Port, {data, Out}} -> - port_receive_loop(Port, Stdout ++ Out, AcceptableExitCodes) - end. 
- -execute_bg(Cfg, Cmd) -> - spawn_link(fun () -> - execute(Cfg, Cmd), - {links, Links} = process_info(self(), links), - [unlink(L) || L <- Links] - end). - -fmt({Fmt, Args}) -> rabbit_misc:format(Fmt, Args); -fmt(Str) -> Str. - diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_runner.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_runner.erl deleted file mode 100644 index d0df292..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_runner.erl +++ /dev/null @@ -1,230 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License at -%% http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the -%% License for the specific language governing rights and limitations -%% under the License. -%% -%% The Original Code is RabbitMQ -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2010-2014 GoPivotal, Inc. All rights reserved. -%% - --module(rabbit_test_runner). - --include_lib("kernel/include/file.hrl"). - --define(TIMEOUT, 600). - --import(rabbit_misc, [pget/2]). - --export([run_in_broker/2, run_multi/5]). - -run_in_broker(Dir, Filter) -> - add_server_test_ebin_dir(), - io:format("~nIn-broker tests~n================~n~n", []), - eunit:test(make_tests_single(Dir, Filter, ?TIMEOUT), []). 
- -run_multi(ServerDir, Dir, Filter, Cover, PluginsDir) -> - io:format("~nMulti-node tests~n================~n~n", []), - %% Umbrella does not give us -sname - net_kernel:start([?MODULE, shortnames]), - inets:start(), %% Used by HTTP tests - error_logger:tty(false), - case Cover of - true -> io:format("Cover compiling..."), - cover:start(), - ok = rabbit_misc:enable_cover(["../rabbitmq-server/"]), - io:format(" done.~n~n"); - false -> ok - end, - R = eunit:test(make_tests_multi( - ServerDir, Dir, Filter, Cover, PluginsDir, ?TIMEOUT), []), - case Cover of - true -> io:format("~nCover reporting..."), - ok = rabbit_misc:report_cover(), - io:format(" done.~n~n"); - false -> ok - end, - R. - -make_tests_single(Dir, Filter, Timeout) -> - {Filtered, AllCount, Width} = find_tests(Dir, Filter, "_test"), - io:format("Running ~B of ~B tests; FILTER=~s~n~n", - [length(Filtered), AllCount, Filter]), - [make_test_single(M, FWith, F, ShowHeading, Timeout, Width) - || {M, FWith, F, ShowHeading} <- annotate_show_heading(Filtered)]. - -make_tests_multi(ServerDir, Dir, Filter, Cover, PluginsDir, Timeout) -> - {Filtered, AllCount, Width} = find_tests(Dir, Filter, "_with"), - io:format("Running ~B of ~B tests; FILTER=~s; COVER=~s~n~n", - [length(Filtered), AllCount, Filter, Cover]), - Cfg = [{cover, Cover}, - {base, basedir() ++ "/nodes"}, - {server, ServerDir}, - {plugins, PluginsDir}], - rabbit_test_configs:enable_plugins(Cfg), - [make_test_multi(M, FWith, F, ShowHeading, Timeout, Width, Cfg) - || {M, FWith, F, ShowHeading} <- annotate_show_heading(Filtered)]. 
- -find_tests(Dir, Filter, Suffix) -> - All = [{M, FWith, F} || - M <- modules(Dir), - {FWith, _Arity} <- proplists:get_value(exports, M:module_info()), - string:right(atom_to_list(FWith), length(Suffix)) =:= Suffix, - F <- [truncate_function_name(FWith, length(Suffix))]], - Filtered = [Test || {M, _FWith, F} = Test <- All, - should_run(M, F, Filter)], - Width = case Filtered of - [] -> 0; - _ -> lists:max([atom_length(F) || {_, _, F} <- Filtered]) - end, - {Filtered, length(All), Width}. - -make_test_single(M, FWith, F, ShowHeading, Timeout, Width) -> - {timeout, - Timeout, - fun () -> - maybe_print_heading(M, ShowHeading), - io:format(user, "~s [running]", [name(F, Width)]), - M:FWith(), - io:format(user, " [PASSED].~n", []) - end}. - -make_test_multi(M, FWith, F, ShowHeading, Timeout, Width, InitialCfg) -> - {setup, - fun () -> - maybe_print_heading(M, ShowHeading), - io:format(user, "~s [setup]", [name(F, Width)]), - setup_error_logger(M, F, basedir()), - recursive_delete(pget(base, InitialCfg)), - try - apply_config(M:FWith(), InitialCfg) - catch - error:{Type, Error, Cfg, Stack} -> - case Cfg of - InitialCfg -> ok; %% [0] - _ -> rabbit_test_configs:stop_nodes(Cfg) - end, - exit({Type, Error, Stack}) - end - end, - fun (Nodes) -> - rabbit_test_configs:stop_nodes(Nodes), - %% Partition tests change this, let's revert - net_kernel:set_net_ticktime(60, 1), - io:format(user, ".~n", []) - end, - fun (Nodes) -> - [{timeout, - Timeout, - fun () -> - [link(pget(linked_pid, N)) || N <- Nodes], - io:format(user, " [running]", []), - %%try - M:F(Nodes), - io:format(user, " [PASSED]", []) - %% catch - %% Type:Reason -> - %% io:format(user, "YYY stop~n", []), - %% rabbit_test_configs:stop_nodes(Nodes), - %% exit({Type, Reason, erlang:get_stacktrace()}) - %% end - end}] - end}. -%% [0] If we didn't get as far as starting any nodes then we only have -%% one proplist for initial config, not several per node. 
So avoid -%% trying to "stop" it - it won't work (and there's nothing to do -%% anyway). - -maybe_print_heading(M, true) -> - io:format(user, "~n~s~n~s~n", [M, string:chars($-, atom_length(M))]); -maybe_print_heading(_M, false) -> - ok. - -apply_config(Things, Cfg) when is_list(Things) -> - lists:foldl(fun apply_config/2, Cfg, Things); -apply_config(F, Cfg) when is_atom(F) -> - apply_config(fun (C) -> rabbit_test_configs:F(C) end, Cfg); -apply_config(F, Cfg) when is_function(F) -> - try - F(Cfg) - catch - Type:Error -> erlang:error({Type, Error, Cfg, erlang:get_stacktrace()}) - end. - -annotate_show_heading(List) -> - annotate_show_heading(List, undefined). - -annotate_show_heading([], _) -> - []; -annotate_show_heading([{M, FWith, F} | Rest], Current) -> - [{M, FWith, F, M =/= Current} | annotate_show_heading(Rest, M)]. - -setup_error_logger(M, F, Base) -> - case error_logger_logfile_filename() of - {error, no_log_file} -> ok; - _ -> ok = error_logger:logfile(close) - end, - FN = rabbit_misc:format("~s/~s:~s.log", [basedir(), M, F]), - ensure_dir(Base), - ok = error_logger:logfile({open, FN}). - -truncate_function_name(FWith, Length) -> - FName = atom_to_list(FWith), - list_to_atom(string:substr(FName, 1, length(FName) - Length)). - -should_run(_M, _F, "all") -> true; -should_run(M, F, Filter) -> MF = rabbit_misc:format("~s:~s", [M, F]), - case re:run(MF, Filter) of - {match, _} -> true; - nomatch -> false - end. - -ensure_dir(Path) -> - case file:read_file_info(Path) of - {ok, #file_info{type=regular}} -> exit({exists_as_file, Path}); - {ok, #file_info{type=directory}} -> ok; - _ -> file:make_dir(Path) - end. - -modules(RelDir) -> - {ok, Files} = file:list_dir(RelDir), - [M || F <- Files, - M <- case string:tokens(F, ".") of - [MStr, "beam"] -> [list_to_atom(MStr)]; - _ -> [] - end]. - -recursive_delete(Dir) -> - rabbit_test_configs:execute({"rm -rf ~s", [Dir]}). - -name(F, Width) -> - R = atom_to_list(F), - R ++ ":" ++ string:chars($ , Width - length(R)). 
- -atom_length(A) -> length(atom_to_list(A)). - -basedir() -> "/tmp/rabbitmq-multi-node". - -%% reimplement error_logger:logfile(filename) only using -%% gen_event:call/4 instead of gen_event:call/3 with our old friend -%% the 5 second timeout. Grr. -error_logger_logfile_filename() -> - case gen_event:call( - error_logger, error_logger_file_h, filename, infinity) of - {error,_} -> {error, no_log_file}; - Val -> Val - end. - -add_server_test_ebin_dir() -> - %% Some tests need modules from this dir, but it's not on the path - %% by default. - {file, Path} = code:is_loaded(rabbit), - Ebin = filename:dirname(Path), - TestEbin = filename:join([Ebin, "..", "test", "ebin"]), - code:add_path(TestEbin). diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_util.erl b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_util.erl deleted file mode 100644 index 973e1b0..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbit_test_util.erl +++ /dev/null @@ -1,147 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(rabbit_test_util). - --include_lib("amqp_client/include/amqp_client.hrl"). --import(rabbit_misc, [pget/2]). - --compile(export_all). - -set_ha_policy(Cfg, Pattern, Policy) -> - set_ha_policy(Cfg, Pattern, Policy, []). 
- -set_ha_policy(Cfg, Pattern, Policy, Extra) -> - set_policy(Cfg, Pattern, Pattern, <<"queues">>, ha_policy(Policy) ++ Extra). - -ha_policy(<<"all">>) -> [{<<"ha-mode">>, <<"all">>}]; -ha_policy({Mode, Params}) -> [{<<"ha-mode">>, Mode}, - {<<"ha-params">>, Params}]. - -set_policy(Cfg, Name, Pattern, ApplyTo, Definition) -> - ok = rpc:call(pget(node, Cfg), rabbit_policy, set, - [<<"/">>, Name, Pattern, Definition, 0, ApplyTo]). - -clear_policy(Cfg, Name) -> - ok = rpc:call(pget(node, Cfg), rabbit_policy, delete, [<<"/">>, Name]). - -set_param(Cfg, Component, Name, Value) -> - ok = rpc:call(pget(node, Cfg), rabbit_runtime_parameters, set, - [<<"/">>, Component, Name, Value, none]). - -clear_param(Cfg, Component, Name) -> - ok = rpc:call(pget(node, Cfg), rabbit_runtime_parameters, clear, - [<<"/">>, Component, Name]). - -enable_plugin(Cfg, Plugin) -> - plugins_action(enable, Cfg, [Plugin], []). - -disable_plugin(Cfg, Plugin) -> - plugins_action(disable, Cfg, [Plugin], []). - -control_action(Command, Cfg) -> - control_action(Command, Cfg, [], []). - -control_action(Command, Cfg, Args) -> - control_action(Command, Cfg, Args, []). - -control_action(Command, Cfg, Args, Opts) -> - Node = pget(node, Cfg), - rpc:call(Node, rabbit_control_main, action, - [Command, Node, Args, Opts, - fun (F, A) -> - error_logger:info_msg(F ++ "~n", A) - end]). - -plugins_action(Command, Cfg, Args, Opts) -> - PluginsFile = os:getenv("RABBITMQ_ENABLED_PLUGINS_FILE"), - PluginsDir = os:getenv("RABBITMQ_PLUGINS_DIR"), - Node = pget(node, Cfg), - rpc:call(Node, rabbit_plugins_main, action, - [Command, Node, Args, Opts, PluginsFile, PluginsDir]). - -restart_app(Cfg) -> - stop_app(Cfg), - start_app(Cfg). - -stop_app(Cfg) -> - control_action(stop_app, Cfg). - -start_app(Cfg) -> - control_action(start_app, Cfg). - -connect(Cfg) -> - Port = pget(port, Cfg), - {ok, Conn} = amqp_connection:start(#amqp_params_network{port = Port}), - {ok, Ch} = amqp_connection:open_channel(Conn), - {Conn, Ch}. 
- -%%---------------------------------------------------------------------------- - -kill_after(Time, Cfg, Method) -> - timer:sleep(Time), - kill(Cfg, Method). - -kill(Cfg, Method) -> - kill0(Cfg, Method), - wait_down(pget(node, Cfg)). - -kill0(Cfg, stop) -> rabbit_test_configs:stop_node(Cfg); -kill0(Cfg, sigkill) -> rabbit_test_configs:kill_node(Cfg). - -wait_down(Node) -> - case net_adm:ping(Node) of - pong -> timer:sleep(25), - wait_down(Node); - pang -> ok - end. - -a2b(A) -> list_to_binary(atom_to_list(A)). - -%%---------------------------------------------------------------------------- - -publish(Ch, QName, Count) -> - amqp_channel:call(Ch, #'confirm.select'{}), - [amqp_channel:call(Ch, - #'basic.publish'{routing_key = QName}, - #amqp_msg{props = #'P_basic'{delivery_mode = 2}, - payload = list_to_binary(integer_to_list(I))}) - || I <- lists:seq(1, Count)], - amqp_channel:wait_for_confirms(Ch). - -consume(Ch, QName, Count) -> - amqp_channel:subscribe(Ch, #'basic.consume'{queue = QName, no_ack = true}, - self()), - CTag = receive #'basic.consume_ok'{consumer_tag = C} -> C end, - [begin - Exp = list_to_binary(integer_to_list(I)), - receive {#'basic.deliver'{consumer_tag = CTag}, - #amqp_msg{payload = Exp}} -> - ok - after 500 -> - exit(timeout) - end - end|| I <- lists:seq(1, Count)], - #'queue.declare_ok'{message_count = 0} - = amqp_channel:call(Ch, #'queue.declare'{queue = QName, - durable = true}), - amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = CTag}), - ok. - -fetch(Ch, QName, Count) -> - [{#'basic.get_ok'{}, _} = - amqp_channel:call(Ch, #'basic.get'{queue = QName}) || - _ <- lists:seq(1, Count)], - ok. 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbitmq_test.app.src b/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbitmq_test.app.src deleted file mode 100644 index 108f874..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/src/rabbitmq_test.app.src +++ /dev/null @@ -1,11 +0,0 @@ -{application, rabbitmq_test, - [ - {description, ""}, - {vsn, "1"}, - {registered, []}, - {applications, [ - kernel, - stdlib - ]}, - {env, []} - ]}. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/cluster_rename.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/cluster_rename.erl deleted file mode 100644 index 258c0dc..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/cluster_rename.erl +++ /dev/null @@ -1,194 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(cluster_rename). - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --import(rabbit_misc, [pget/2]). - --define(CLUSTER2, - fun(C) -> rabbit_test_configs:cluster(C, [bugs, bigwig]) end). - --define(CLUSTER3, - fun(C) -> rabbit_test_configs:cluster(C, [bugs, bigwig, peter]) end). - -%% Rolling rename of a cluster, each node should do a secondary rename. -rename_cluster_one_by_one_with() -> ?CLUSTER3. 
-rename_cluster_one_by_one([Bugs, Bigwig, Peter]) -> - publish_all([{Bugs, <<"1">>}, {Bigwig, <<"2">>}, {Peter, <<"3">>}]), - - Jessica = stop_rename_start(Bugs, jessica, [bugs, jessica]), - Hazel = stop_rename_start(Bigwig, hazel, [bigwig, hazel]), - Flopsy = stop_rename_start(Peter, flopsy, [peter, flopsy]), - - consume_all([{Jessica, <<"1">>}, {Hazel, <<"2">>}, {Flopsy, <<"3">>}]), - stop_all([Jessica, Hazel, Flopsy]), - ok. - -%% Big bang rename of a cluster, bugs should do a primary rename. -rename_cluster_big_bang_with() -> ?CLUSTER3. -rename_cluster_big_bang([Bugs, Bigwig, Peter]) -> - publish_all([{Bugs, <<"1">>}, {Bigwig, <<"2">>}, {Peter, <<"3">>}]), - - Peter1 = rabbit_test_configs:stop_node(Peter), - Bigwig1 = rabbit_test_configs:stop_node(Bigwig), - Bugs1 = rabbit_test_configs:stop_node(Bugs), - - Map = [bugs, jessica, bigwig, hazel, peter, flopsy], - Jessica0 = rename_node(Bugs1, jessica, Map), - Hazel0 = rename_node(Bigwig1, hazel, Map), - Flopsy0 = rename_node(Peter1, flopsy, Map), - - Jessica = rabbit_test_configs:start_node(Jessica0), - Hazel = rabbit_test_configs:start_node(Hazel0), - Flopsy = rabbit_test_configs:start_node(Flopsy0), - - consume_all([{Jessica, <<"1">>}, {Hazel, <<"2">>}, {Flopsy, <<"3">>}]), - stop_all([Jessica, Hazel, Flopsy]), - ok. - -%% Here we test that bugs copes with things being renamed around it. -partial_one_by_one_with() -> ?CLUSTER3. -partial_one_by_one([Bugs, Bigwig, Peter]) -> - publish_all([{Bugs, <<"1">>}, {Bigwig, <<"2">>}, {Peter, <<"3">>}]), - - Jessica = stop_rename_start(Bugs, jessica, [bugs, jessica]), - Hazel = stop_rename_start(Bigwig, hazel, [bigwig, hazel]), - - consume_all([{Jessica, <<"1">>}, {Hazel, <<"2">>}, {Peter, <<"3">>}]), - stop_all([Jessica, Hazel, Peter]), - ok. - -%% Here we test that bugs copes with things being renamed around it. -partial_big_bang_with() -> ?CLUSTER3. 
-partial_big_bang([Bugs, Bigwig, Peter]) -> - publish_all([{Bugs, <<"1">>}, {Bigwig, <<"2">>}, {Peter, <<"3">>}]), - - Peter1 = rabbit_test_configs:stop_node(Peter), - Bigwig1 = rabbit_test_configs:stop_node(Bigwig), - Bugs1 = rabbit_test_configs:stop_node(Bugs), - - Map = [bigwig, hazel, peter, flopsy], - Hazel0 = rename_node(Bigwig1, hazel, Map), - Flopsy0 = rename_node(Peter1, flopsy, Map), - - Bugs2 = rabbit_test_configs:start_node(Bugs1), - Hazel = rabbit_test_configs:start_node(Hazel0), - Flopsy = rabbit_test_configs:start_node(Flopsy0), - - consume_all([{Bugs2, <<"1">>}, {Hazel, <<"2">>}, {Flopsy, <<"3">>}]), - stop_all([Bugs2, Hazel, Flopsy]), - ok. - -%% We should be able to specify the -n parameter on ctl with either -%% the before or after name for the local node (since in real cases -%% one might want to invoke the command before or after the hostname -%% has changed) - usually we test before so here we test after. -post_change_nodename_with() -> ?CLUSTER2. -post_change_nodename([Bugs, _Bigwig]) -> - publish(Bugs, <<"bugs">>), - - Bugs1 = rabbit_test_configs:stop_node(Bugs), - Bugs2 = [{nodename, jessica} | proplists:delete(nodename, Bugs1)], - Jessica0 = rename_node(Bugs2, jessica, [bugs, jessica]), - Jessica = rabbit_test_configs:start_node(Jessica0), - - consume(Jessica, <<"bugs">>), - stop_all([Jessica]), - ok. - -%% If we invoke rename but the node name does not actually change, we -%% should roll back. -abortive_rename_with() -> ?CLUSTER2. -abortive_rename([Bugs, _Bigwig]) -> - publish(Bugs, <<"bugs">>), - - Bugs1 = rabbit_test_configs:stop_node(Bugs), - _Jessica = rename_node(Bugs1, jessica, [bugs, jessica]), - Bugs2 = rabbit_test_configs:start_node(Bugs1), - - consume(Bugs2, <<"bugs">>), - ok. - -%% And test some ways the command can fail. -rename_fail_with() -> ?CLUSTER2. 
-rename_fail([Bugs, _Bigwig]) -> - Bugs1 = rabbit_test_configs:stop_node(Bugs), - %% Rename from a node that does not exist - rename_node_fail(Bugs1, [bugzilla, jessica]), - %% Rename to a node which does - rename_node_fail(Bugs1, [bugs, bigwig]), - %% Rename two nodes to the same thing - rename_node_fail(Bugs1, [bugs, jessica, bigwig, jessica]), - %% Rename while impersonating a node not in the cluster - rename_node_fail(set_node(rabbit, Bugs1), [bugs, jessica]), - ok. - -rename_twice_fail_with() -> ?CLUSTER2. -rename_twice_fail([Bugs, _Bigwig]) -> - Bugs1 = rabbit_test_configs:stop_node(Bugs), - Indecisive = rename_node(Bugs1, indecisive, [bugs, indecisive]), - rename_node_fail(Indecisive, [indecisive, jessica]), - ok. - -%% ---------------------------------------------------------------------------- - -%% Normal post-test stop does not work since names have changed... -stop_all(Cfgs) -> - [rabbit_test_configs:stop_node(Cfg) || Cfg <- Cfgs]. - -stop_rename_start(Cfg, Nodename, Map) -> - rabbit_test_configs:start_node( - rename_node(rabbit_test_configs:stop_node(Cfg), Nodename, Map)). - -rename_node(Cfg, Nodename, Map) -> - rename_node(Cfg, Nodename, Map, fun rabbit_test_configs:rabbitmqctl/2). - -rename_node_fail(Cfg, Map) -> - rename_node(Cfg, ignored, Map, fun rabbit_test_configs:rabbitmqctl_fail/2). - -rename_node(Cfg, Nodename, Map, Ctl) -> - MapS = string:join( - [atom_to_list(rabbit_nodes:make(N)) || N <- Map], " "), - Ctl(Cfg, {"rename_cluster_node ~s", [MapS]}), - set_node(Nodename, Cfg). - -publish(Cfg, Q) -> - Ch = pget(channel, Cfg), - amqp_channel:call(Ch, #'confirm.select'{}), - amqp_channel:call(Ch, #'queue.declare'{queue = Q, durable = true}), - amqp_channel:cast(Ch, #'basic.publish'{routing_key = Q}, - #amqp_msg{props = #'P_basic'{delivery_mode = 2}, - payload = Q}), - amqp_channel:wait_for_confirms(Ch). 
- -consume(Cfg, Q) -> - {_Conn, Ch} = rabbit_test_util:connect(Cfg), - amqp_channel:call(Ch, #'queue.declare'{queue = Q, durable = true}), - {#'basic.get_ok'{}, #amqp_msg{payload = Q}} = - amqp_channel:call(Ch, #'basic.get'{queue = Q}). - - -publish_all(CfgsKeys) -> - [publish(Cfg, Key) || {Cfg, Key} <- CfgsKeys]. - -consume_all(CfgsKeys) -> - [consume(Cfg, Key) || {Cfg, Key} <- CfgsKeys]. - -set_node(Nodename, Cfg) -> - [{nodename, Nodename} | proplists:delete(nodename, Cfg)]. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/clustering_management.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/clustering_management.erl deleted file mode 100644 index b114aab..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/clustering_management.erl +++ /dev/null @@ -1,608 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(clustering_management). - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --import(rabbit_misc, [pget/2]). - --define(LOOP_RECURSION_DELAY, 100). - -join_and_part_cluster_with() -> start_abc. 
-join_and_part_cluster(Config) -> - [Rabbit, Hare, Bunny] = cluster_members(Config), - assert_not_clustered(Rabbit), - assert_not_clustered(Hare), - assert_not_clustered(Bunny), - - stop_join_start(Rabbit, Bunny), - assert_clustered([Rabbit, Bunny]), - - stop_join_start(Hare, Bunny, true), - assert_cluster_status( - {[Bunny, Hare, Rabbit], [Bunny, Rabbit], [Bunny, Hare, Rabbit]}, - [Rabbit, Hare, Bunny]), - - %% Allow clustering with already clustered node - ok = stop_app(Rabbit), - {ok, already_member} = join_cluster(Rabbit, Hare), - ok = start_app(Rabbit), - - stop_reset_start(Rabbit), - assert_not_clustered(Rabbit), - assert_cluster_status({[Bunny, Hare], [Bunny], [Bunny, Hare]}, - [Hare, Bunny]), - - stop_reset_start(Hare), - assert_not_clustered(Hare), - assert_not_clustered(Bunny). - -join_cluster_bad_operations_with() -> start_abc. -join_cluster_bad_operations(Config) -> - [Rabbit, Hare, Bunny] = cluster_members(Config), - - %% Non-existant node - ok = stop_app(Rabbit), - assert_failure(fun () -> join_cluster(Rabbit, non@existant) end), - ok = start_app(Rabbit), - assert_not_clustered(Rabbit), - - %% Trying to cluster with mnesia running - assert_failure(fun () -> join_cluster(Rabbit, Bunny) end), - assert_not_clustered(Rabbit), - - %% Trying to cluster the node with itself - ok = stop_app(Rabbit), - assert_failure(fun () -> join_cluster(Rabbit, Rabbit) end), - ok = start_app(Rabbit), - assert_not_clustered(Rabbit), - - %% Do not let the node leave the cluster or reset if it's the only - %% ram node - stop_join_start(Hare, Rabbit, true), - assert_cluster_status({[Rabbit, Hare], [Rabbit], [Rabbit, Hare]}, - [Rabbit, Hare]), - ok = stop_app(Hare), - assert_failure(fun () -> join_cluster(Rabbit, Bunny) end), - assert_failure(fun () -> reset(Rabbit) end), - ok = start_app(Hare), - assert_cluster_status({[Rabbit, Hare], [Rabbit], [Rabbit, Hare]}, - [Rabbit, Hare]), - - %% Cannot start RAM-only node first - ok = stop_app(Rabbit), - ok = stop_app(Hare), - 
assert_failure(fun () -> start_app(Hare) end), - ok = start_app(Rabbit), - ok = start_app(Hare), - ok. - -%% This tests that the nodes in the cluster are notified immediately of a node -%% join, and not just after the app is started. -join_to_start_interval_with() -> start_abc. -join_to_start_interval(Config) -> - [Rabbit, Hare, _Bunny] = cluster_members(Config), - - ok = stop_app(Rabbit), - ok = join_cluster(Rabbit, Hare), - assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]}, - [Rabbit, Hare]), - ok = start_app(Rabbit), - assert_clustered([Rabbit, Hare]). - -forget_cluster_node_with() -> start_abc. -forget_cluster_node([_, HareCfg, _] = Config) -> - [Rabbit, Hare, Bunny] = cluster_members(Config), - - %% Trying to remove a node not in the cluster should fail - assert_failure(fun () -> forget_cluster_node(Hare, Rabbit) end), - - stop_join_start(Rabbit, Hare), - assert_clustered([Rabbit, Hare]), - - %% Trying to remove an online node should fail - assert_failure(fun () -> forget_cluster_node(Hare, Rabbit) end), - - ok = stop_app(Rabbit), - %% We're passing the --offline flag, but Hare is online - assert_failure(fun () -> forget_cluster_node(Hare, Rabbit, true) end), - %% Removing some non-existant node will fail - assert_failure(fun () -> forget_cluster_node(Hare, non@existant) end), - ok = forget_cluster_node(Hare, Rabbit), - assert_not_clustered(Hare), - assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]}, - [Rabbit]), - - %% Now we can't start Rabbit since it thinks that it's still in the cluster - %% with Hare, while Hare disagrees. - assert_failure(fun () -> start_app(Rabbit) end), - - ok = reset(Rabbit), - ok = start_app(Rabbit), - assert_not_clustered(Rabbit), - - %% Now we remove Rabbit from an offline node. 
- stop_join_start(Bunny, Hare), - stop_join_start(Rabbit, Hare), - assert_clustered([Rabbit, Hare, Bunny]), - ok = stop_app(Hare), - ok = stop_app(Rabbit), - ok = stop_app(Bunny), - %% This is fine but we need the flag - assert_failure(fun () -> forget_cluster_node(Hare, Bunny) end), - %% Also fails because hare node is still running - assert_failure(fun () -> forget_cluster_node(Hare, Bunny, true) end), - %% But this works - HareCfg2 = rabbit_test_configs:stop_node(HareCfg), - rabbit_test_configs:rabbitmqctl( - HareCfg2, {"forget_cluster_node --offline ~s", [Bunny]}), - _HareCfg3 = rabbit_test_configs:start_node(HareCfg2), - ok = start_app(Rabbit), - %% Bunny still thinks its clustered with Rabbit and Hare - assert_failure(fun () -> start_app(Bunny) end), - ok = reset(Bunny), - ok = start_app(Bunny), - assert_not_clustered(Bunny), - assert_clustered([Rabbit, Hare]). - -forget_removes_things_with() -> cluster_ab. -forget_removes_things(Cfg) -> - test_removes_things(Cfg, fun (R, H) -> ok = forget_cluster_node(H, R) end). - -reset_removes_things_with() -> cluster_ab. -reset_removes_things(Cfg) -> - test_removes_things(Cfg, fun (R, _H) -> ok = reset(R) end). - -test_removes_things([RabbitCfg, HareCfg] = Config, LoseRabbit) -> - Unmirrored = <<"unmirrored-queue">>, - [Rabbit, Hare] = cluster_members(Config), - RCh = pget(channel, RabbitCfg), - declare(RCh, Unmirrored), - ok = stop_app(Rabbit), - - {_HConn, HCh} = rabbit_test_util:connect(HareCfg), - {'EXIT',{{shutdown,{server_initiated_close,404,_}}, _}} = - (catch declare(HCh, Unmirrored)), - - ok = LoseRabbit(Rabbit, Hare), - {_HConn2, HCh2} = rabbit_test_util:connect(HareCfg), - declare(HCh2, Unmirrored), - ok. - -forget_offline_removes_things_with() -> cluster_ab. 
-forget_offline_removes_things([Rabbit, Hare]) -> - Unmirrored = <<"unmirrored-queue">>, - X = <<"X">>, - RCh = pget(channel, Rabbit), - declare(RCh, Unmirrored), - - amqp_channel:call(RCh, #'exchange.declare'{durable = true, - exchange = X, - auto_delete = true}), - amqp_channel:call(RCh, #'queue.bind'{queue = Unmirrored, - exchange = X}), - ok = stop_app(pget(node, Rabbit)), - - {_HConn, HCh} = rabbit_test_util:connect(Hare), - {'EXIT',{{shutdown,{server_initiated_close,404,_}}, _}} = - (catch declare(HCh, Unmirrored)), - - Hare2 = rabbit_test_configs:stop_node(Hare), - _Rabbit2 = rabbit_test_configs:stop_node(Rabbit), - rabbit_test_configs:rabbitmqctl( - Hare2, {"forget_cluster_node --offline ~s", [pget(node, Rabbit)]}), - Hare3 = rabbit_test_configs:start_node(Hare2), - - {_HConn2, HCh2} = rabbit_test_util:connect(Hare3), - declare(HCh2, Unmirrored), - {'EXIT',{{shutdown,{server_initiated_close,404,_}}, _}} = - (catch amqp_channel:call(HCh2,#'exchange.declare'{durable = true, - exchange = X, - auto_delete = true, - passive = true})), - ok. - -forget_promotes_offline_slave_with() -> - fun (Cfgs) -> - rabbit_test_configs:cluster(Cfgs, [a, b, c, d]) - end. - -forget_promotes_offline_slave([A, B, C, D]) -> - ACh = pget(channel, A), - ANode = pget(node, A), - Q = <<"mirrored-queue">>, - declare(ACh, Q), - set_ha_policy(Q, A, [B, C]), - set_ha_policy(Q, A, [C, D]), %% Test add and remove from recoverable_slaves - - %% Publish and confirm - amqp_channel:call(ACh, #'confirm.select'{}), - amqp_channel:cast(ACh, #'basic.publish'{routing_key = Q}, - #amqp_msg{props = #'P_basic'{delivery_mode = 2}}), - amqp_channel:wait_for_confirms(ACh), - - %% We kill nodes rather than stop them in order to make sure - %% that we aren't dependent on anything that happens as they shut - %% down (see bug 26467). 
- D2 = rabbit_test_configs:kill_node(D), - C2 = rabbit_test_configs:kill_node(C), - _B2 = rabbit_test_configs:kill_node(B), - _A2 = rabbit_test_configs:kill_node(A), - - rabbit_test_configs:rabbitmqctl(C2, "force_boot"), - - C3 = rabbit_test_configs:start_node(C2), - - %% We should now have the following dramatis personae: - %% A - down, master - %% B - down, used to be slave, no longer is, never had the message - %% C - running, should be slave, but has wiped the message on restart - %% D - down, recoverable slave, contains message - %% - %% So forgetting A should offline-promote the queue to D, keeping - %% the message. - - rabbit_test_configs:rabbitmqctl(C3, {"forget_cluster_node ~s", [ANode]}), - - D3 = rabbit_test_configs:start_node(D2), - {_DConn2, DCh2} = rabbit_test_util:connect(D3), - #'queue.declare_ok'{message_count = 1} = declare(DCh2, Q), - ok. - -set_ha_policy(Q, MasterCfg, SlaveCfgs) -> - Nodes = [list_to_binary(atom_to_list(pget(node, N))) || - N <- [MasterCfg | SlaveCfgs]], - rabbit_test_util:set_ha_policy(MasterCfg, Q, {<<"nodes">>, Nodes}), - await_slaves(Q, pget(node, MasterCfg), [pget(node, C) || C <- SlaveCfgs]). - -await_slaves(Q, MNode, SNodes) -> - {ok, #amqqueue{pid = MPid, - slave_pids = SPids}} = - rpc:call(MNode, rabbit_amqqueue, lookup, - [rabbit_misc:r(<<"/">>, queue, Q)]), - ActMNode = node(MPid), - ActSNodes = lists:usort([node(P) || P <- SPids]), - case {MNode, lists:usort(SNodes)} of - {ActMNode, ActSNodes} -> ok; - _ -> timer:sleep(100), - await_slaves(Q, MNode, SNodes) - end. - -force_boot_with() -> cluster_ab. -force_boot([Rabbit, Hare]) -> - rabbit_test_configs:rabbitmqctl_fail(Rabbit, force_boot), - Rabbit2 = rabbit_test_configs:stop_node(Rabbit), - _Hare2 = rabbit_test_configs:stop_node(Hare), - rabbit_test_configs:start_node_fail(Rabbit2), - rabbit_test_configs:rabbitmqctl(Rabbit2, force_boot), - _Rabbit3 = rabbit_test_configs:start_node(Rabbit2), - ok. - -change_cluster_node_type_with() -> start_abc. 
-change_cluster_node_type(Config) -> - [Rabbit, Hare, _Bunny] = cluster_members(Config), - - %% Trying to change the ram node when not clustered should always fail - ok = stop_app(Rabbit), - assert_failure(fun () -> change_cluster_node_type(Rabbit, ram) end), - assert_failure(fun () -> change_cluster_node_type(Rabbit, disc) end), - ok = start_app(Rabbit), - - ok = stop_app(Rabbit), - join_cluster(Rabbit, Hare), - assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]}, - [Rabbit, Hare]), - change_cluster_node_type(Rabbit, ram), - assert_cluster_status({[Rabbit, Hare], [Hare], [Hare]}, - [Rabbit, Hare]), - change_cluster_node_type(Rabbit, disc), - assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]}, - [Rabbit, Hare]), - change_cluster_node_type(Rabbit, ram), - ok = start_app(Rabbit), - assert_cluster_status({[Rabbit, Hare], [Hare], [Hare, Rabbit]}, - [Rabbit, Hare]), - - %% Changing to ram when you're the only ram node should fail - ok = stop_app(Hare), - assert_failure(fun () -> change_cluster_node_type(Hare, ram) end), - ok = start_app(Hare). - -change_cluster_when_node_offline_with() -> start_abc. 
-change_cluster_when_node_offline(Config) -> - [Rabbit, Hare, Bunny] = cluster_members(Config), - - %% Cluster the three notes - stop_join_start(Rabbit, Hare), - assert_clustered([Rabbit, Hare]), - - stop_join_start(Bunny, Hare), - assert_clustered([Rabbit, Hare, Bunny]), - - %% Bring down Rabbit, and remove Bunny from the cluster while - %% Rabbit is offline - ok = stop_app(Rabbit), - ok = stop_app(Bunny), - ok = reset(Bunny), - assert_cluster_status({[Bunny], [Bunny], []}, [Bunny]), - assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]}, [Hare]), - assert_cluster_status( - {[Rabbit, Hare, Bunny], [Rabbit, Hare, Bunny], [Hare, Bunny]}, [Rabbit]), - - %% Bring Rabbit back up - ok = start_app(Rabbit), - assert_clustered([Rabbit, Hare]), - ok = start_app(Bunny), - assert_not_clustered(Bunny), - - %% Now the same, but Rabbit is a RAM node, and we bring up Bunny - %% before - ok = stop_app(Rabbit), - ok = change_cluster_node_type(Rabbit, ram), - ok = start_app(Rabbit), - stop_join_start(Bunny, Hare), - assert_cluster_status( - {[Rabbit, Hare, Bunny], [Hare, Bunny], [Rabbit, Hare, Bunny]}, - [Rabbit, Hare, Bunny]), - ok = stop_app(Rabbit), - ok = stop_app(Bunny), - ok = reset(Bunny), - ok = start_app(Bunny), - assert_not_clustered(Bunny), - assert_cluster_status({[Rabbit, Hare], [Hare], [Hare]}, [Hare]), - assert_cluster_status( - {[Rabbit, Hare, Bunny], [Hare, Bunny], [Hare, Bunny]}, - [Rabbit]), - ok = start_app(Rabbit), - assert_cluster_status({[Rabbit, Hare], [Hare], [Rabbit, Hare]}, - [Rabbit, Hare]), - assert_not_clustered(Bunny). - -update_cluster_nodes_with() -> start_abc. -update_cluster_nodes(Config) -> - [Rabbit, Hare, Bunny] = cluster_members(Config), - - %% Mnesia is running... 
- assert_failure(fun () -> update_cluster_nodes(Rabbit, Hare) end), - - ok = stop_app(Rabbit), - ok = join_cluster(Rabbit, Hare), - ok = stop_app(Bunny), - ok = join_cluster(Bunny, Hare), - ok = start_app(Bunny), - stop_reset_start(Hare), - assert_failure(fun () -> start_app(Rabbit) end), - %% Bogus node - assert_failure(fun () -> update_cluster_nodes(Rabbit, non@existant) end), - %% Inconsisent node - assert_failure(fun () -> update_cluster_nodes(Rabbit, Hare) end), - ok = update_cluster_nodes(Rabbit, Bunny), - ok = start_app(Rabbit), - assert_not_clustered(Hare), - assert_clustered([Rabbit, Bunny]). - -erlang_config_with() -> start_ab. -erlang_config(Config) -> - [Rabbit, Hare] = cluster_members(Config), - - ok = stop_app(Hare), - ok = reset(Hare), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, {[Rabbit], disc}]), - ok = start_app(Hare), - assert_clustered([Rabbit, Hare]), - - ok = stop_app(Hare), - ok = reset(Hare), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, {[Rabbit], ram}]), - ok = start_app(Hare), - assert_cluster_status({[Rabbit, Hare], [Rabbit], [Rabbit, Hare]}, - [Rabbit, Hare]), - - %% Check having a stop_app'ed node around doesn't break completely. - ok = stop_app(Hare), - ok = reset(Hare), - ok = stop_app(Rabbit), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, {[Rabbit], disc}]), - ok = start_app(Hare), - ok = start_app(Rabbit), - assert_not_clustered(Hare), - assert_not_clustered(Rabbit), - - %% We get a warning but we start anyway - ok = stop_app(Hare), - ok = reset(Hare), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, {[non@existent], disc}]), - ok = start_app(Hare), - assert_not_clustered(Hare), - assert_not_clustered(Rabbit), - - %% If we use a legacy config file, the node fails to start. 
- ok = stop_app(Hare), - ok = reset(Hare), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, [Rabbit]]), - assert_failure(fun () -> start_app(Hare) end), - assert_not_clustered(Rabbit), - - %% If we use an invalid node name, the node fails to start. - ok = stop_app(Hare), - ok = reset(Hare), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, {["Mike's computer"], disc}]), - assert_failure(fun () -> start_app(Hare) end), - assert_not_clustered(Rabbit), - - %% If we use an invalid node type, the node fails to start. - ok = stop_app(Hare), - ok = reset(Hare), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, {[Rabbit], blue}]), - assert_failure(fun () -> start_app(Hare) end), - assert_not_clustered(Rabbit), - - %% If we use an invalid cluster_nodes conf, the node fails to start. - ok = stop_app(Hare), - ok = reset(Hare), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, true]), - assert_failure(fun () -> start_app(Hare) end), - assert_not_clustered(Rabbit), - - ok = stop_app(Hare), - ok = reset(Hare), - ok = rpc:call(Hare, application, set_env, - [rabbit, cluster_nodes, "Yes, please"]), - assert_failure(fun () -> start_app(Hare) end), - assert_not_clustered(Rabbit). - -force_reset_node_with() -> start_abc. -force_reset_node(Config) -> - [Rabbit, Hare, _Bunny] = cluster_members(Config), - - stop_join_start(Rabbit, Hare), - stop_app(Rabbit), - force_reset(Rabbit), - %% Hare thinks that Rabbit is still clustered - assert_cluster_status({[Rabbit, Hare], [Rabbit, Hare], [Hare]}, - [Hare]), - %% %% ...but it isn't - assert_cluster_status({[Rabbit], [Rabbit], []}, [Rabbit]), - %% We can rejoin Rabbit and Hare - update_cluster_nodes(Rabbit, Hare), - start_app(Rabbit), - assert_clustered([Rabbit, Hare]). - -%% ---------------------------------------------------------------------------- -%% Internal utils - -cluster_members(Nodes) -> [pget(node,Cfg) || Cfg <- Nodes]. 
- -assert_cluster_status(Status0, Nodes) -> - Status = {AllNodes, _, _} = sort_cluster_status(Status0), - wait_for_cluster_status(Status, AllNodes, Nodes). - -wait_for_cluster_status(Status, AllNodes, Nodes) -> - Max = 10000 / ?LOOP_RECURSION_DELAY, - wait_for_cluster_status(0, Max, Status, AllNodes, Nodes). - -wait_for_cluster_status(N, Max, Status, _AllNodes, Nodes) when N >= Max -> - erlang:error({cluster_status_max_tries_failed, - [{nodes, Nodes}, - {expected_status, Status}, - {max_tried, Max}]}); -wait_for_cluster_status(N, Max, Status, AllNodes, Nodes) -> - case lists:all(fun (Node) -> - verify_status_equal(Node, Status, AllNodes) - end, Nodes) of - true -> ok; - false -> timer:sleep(?LOOP_RECURSION_DELAY), - wait_for_cluster_status(N + 1, Max, Status, AllNodes, Nodes) - end. - -verify_status_equal(Node, Status, AllNodes) -> - NodeStatus = sort_cluster_status(cluster_status(Node)), - (AllNodes =/= [Node]) =:= rpc:call(Node, rabbit_mnesia, is_clustered, []) - andalso NodeStatus =:= Status. - -cluster_status(Node) -> - {rpc:call(Node, rabbit_mnesia, cluster_nodes, [all]), - rpc:call(Node, rabbit_mnesia, cluster_nodes, [disc]), - rpc:call(Node, rabbit_mnesia, cluster_nodes, [running])}. - -sort_cluster_status({All, Disc, Running}) -> - {lists:sort(All), lists:sort(Disc), lists:sort(Running)}. - -assert_clustered(Nodes) -> - assert_cluster_status({Nodes, Nodes, Nodes}, Nodes). - -assert_not_clustered(Node) -> - assert_cluster_status({[Node], [Node], [Node]}, [Node]). - -assert_failure(Fun) -> - case catch Fun() of - {error, Reason} -> Reason; - {badrpc, {'EXIT', Reason}} -> Reason; - {badrpc_multi, Reason, _Nodes} -> Reason; - Other -> exit({expected_failure, Other}) - end. - -stop_app(Node) -> - control_action(stop_app, Node). - -start_app(Node) -> - control_action(start_app, Node). - -join_cluster(Node, To) -> - join_cluster(Node, To, false). - -join_cluster(Node, To, Ram) -> - control_action(join_cluster, Node, [atom_to_list(To)], [{"--ram", Ram}]). 
- -reset(Node) -> - control_action(reset, Node). - -force_reset(Node) -> - control_action(force_reset, Node). - -forget_cluster_node(Node, Removee, RemoveWhenOffline) -> - control_action(forget_cluster_node, Node, [atom_to_list(Removee)], - [{"--offline", RemoveWhenOffline}]). - -forget_cluster_node(Node, Removee) -> - forget_cluster_node(Node, Removee, false). - -change_cluster_node_type(Node, Type) -> - control_action(change_cluster_node_type, Node, [atom_to_list(Type)]). - -update_cluster_nodes(Node, DiscoveryNode) -> - control_action(update_cluster_nodes, Node, [atom_to_list(DiscoveryNode)]). - -stop_join_start(Node, ClusterTo, Ram) -> - ok = stop_app(Node), - ok = join_cluster(Node, ClusterTo, Ram), - ok = start_app(Node). - -stop_join_start(Node, ClusterTo) -> - stop_join_start(Node, ClusterTo, false). - -stop_reset_start(Node) -> - ok = stop_app(Node), - ok = reset(Node), - ok = start_app(Node). - -control_action(Command, Node) -> - control_action(Command, Node, [], []). - -control_action(Command, Node, Args) -> - control_action(Command, Node, Args, []). - -control_action(Command, Node, Args, Opts) -> - rpc:call(Node, rabbit_control_main, action, - [Command, Node, Args, Opts, - fun io:format/2]). - -declare(Ch, Name) -> - Res = amqp_channel:call(Ch, #'queue.declare'{durable = true, - queue = Name}), - amqp_channel:call(Ch, #'queue.bind'{queue = Name, - exchange = <<"amq.fanout">>}), - Res. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/crashing_queues.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/crashing_queues.erl deleted file mode 100644 index e34fd04..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/crashing_queues.erl +++ /dev/null @@ -1,213 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. 
You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(crashing_queues). - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --import(rabbit_test_util, [set_ha_policy/3, a2b/1]). --import(rabbit_misc, [pget/2]). - -crashing_unmirrored_with() -> [cluster_ab]. -crashing_unmirrored([CfgA, CfgB]) -> - A = pget(node, CfgA), - ChA = pget(channel, CfgA), - ConnB = pget(connection, CfgB), - amqp_channel:call(ChA, #'confirm.select'{}), - test_queue_failure(A, ChA, ConnB, 1, 0, - #'queue.declare'{queue = <<"test">>, durable = true}), - test_queue_failure(A, ChA, ConnB, 0, 0, - #'queue.declare'{queue = <<"test">>, durable = false}), - ok. - -crashing_mirrored_with() -> [cluster_ab, ha_policy_all]. -crashing_mirrored([CfgA, CfgB]) -> - A = pget(node, CfgA), - ChA = pget(channel, CfgA), - ConnB = pget(connection, CfgB), - amqp_channel:call(ChA, #'confirm.select'{}), - test_queue_failure(A, ChA, ConnB, 2, 1, - #'queue.declare'{queue = <<"test">>, durable = true}), - test_queue_failure(A, ChA, ConnB, 2, 1, - #'queue.declare'{queue = <<"test">>, durable = false}), - ok. 
- -test_queue_failure(Node, Ch, RaceConn, MsgCount, SlaveCount, Decl) -> - #'queue.declare_ok'{queue = QName} = amqp_channel:call(Ch, Decl), - publish(Ch, QName, transient), - publish(Ch, QName, durable), - Racer = spawn_declare_racer(RaceConn, Decl), - kill_queue(Node, QName), - assert_message_count(MsgCount, Ch, QName), - assert_slave_count(SlaveCount, Node, QName), - stop_declare_racer(Racer), - amqp_channel:call(Ch, #'queue.delete'{queue = QName}). - -give_up_after_repeated_crashes_with() -> [cluster_ab]. -give_up_after_repeated_crashes([CfgA, CfgB]) -> - A = pget(node, CfgA), - ChA = pget(channel, CfgA), - ChB = pget(channel, CfgB), - QName = <<"test">>, - amqp_channel:call(ChA, #'confirm.select'{}), - amqp_channel:call(ChA, #'queue.declare'{queue = QName, - durable = true}), - await_state(A, QName, running), - publish(ChA, QName, durable), - kill_queue_hard(A, QName), - {'EXIT', _} = (catch amqp_channel:call( - ChA, #'queue.declare'{queue = QName, - durable = true})), - await_state(A, QName, crashed), - amqp_channel:call(ChB, #'queue.delete'{queue = QName}), - amqp_channel:call(ChB, #'queue.declare'{queue = QName, - durable = true}), - await_state(A, QName, running), - - %% Since it's convenient, also test absent queue status here. - rabbit_test_configs:stop_node(CfgB), - await_state(A, QName, down), - ok. - - -publish(Ch, QName, DelMode) -> - Publish = #'basic.publish'{exchange = <<>>, routing_key = QName}, - Msg = #amqp_msg{props = #'P_basic'{delivery_mode = del_mode(DelMode)}}, - amqp_channel:cast(Ch, Publish, Msg), - amqp_channel:wait_for_confirms(Ch). - -del_mode(transient) -> 1; -del_mode(durable) -> 2. - -spawn_declare_racer(Conn, Decl) -> - Self = self(), - spawn_link(fun() -> declare_racer_loop(Self, Conn, Decl) end). - -stop_declare_racer(Pid) -> - Pid ! stop, - MRef = erlang:monitor(process, Pid), - receive - {'DOWN', MRef, process, Pid, _} -> ok - end. 
- -declare_racer_loop(Parent, Conn, Decl) -> - receive - stop -> unlink(Parent) - after 0 -> - %% Catch here because we might happen to catch the queue - %% while it is in the middle of recovering and thus - %% explode with NOT_FOUND because crashed. Doesn't matter, - %% we are only in this loop to try to fool the recovery - %% code anyway. - try - case amqp_connection:open_channel(Conn) of - {ok, Ch} -> amqp_channel:call(Ch, Decl); - closing -> ok - end - catch - exit:_ -> - ok - end, - declare_racer_loop(Parent, Conn, Decl) - end. - -await_state(Node, QName, State) -> - await_state(Node, QName, State, 30000). - -await_state(Node, QName, State, Time) -> - case state(Node, QName) of - State -> - ok; - Other -> - case Time of - 0 -> exit({timeout_awaiting_state, State, Other}); - _ -> timer:sleep(100), - await_state(Node, QName, State, Time - 100) - end - end. - -state(Node, QName) -> - V = <<"/">>, - Res = rabbit_misc:r(V, queue, QName), - [[{name, Res}, - {state, State}]] = - rpc:call(Node, rabbit_amqqueue, info_all, [V, [name, state]]), - State. - -kill_queue_hard(Node, QName) -> - case kill_queue(Node, QName) of - crashed -> ok; - _NewPid -> timer:sleep(100), - kill_queue_hard(Node, QName) - end. - -kill_queue(Node, QName) -> - Pid1 = queue_pid(Node, QName), - exit(Pid1, boom), - await_new_pid(Node, QName, Pid1). - -queue_pid(Node, QName) -> - #amqqueue{pid = QPid, - state = State} = lookup(Node, QName), - case State of - crashed -> case sup_child(Node, rabbit_amqqueue_sup_sup) of - {ok, _} -> QPid; %% restarting - {error, no_child} -> crashed %% given up - end; - _ -> QPid - end. - -sup_child(Node, Sup) -> - case rpc:call(Node, supervisor2, which_children, [Sup]) of - [{_, Child, _, _}] -> {ok, Child}; - [] -> {error, no_child}; - {badrpc, {'EXIT', {noproc, _}}} -> {error, no_sup} - end. - -lookup(Node, QName) -> - {ok, Q} = rpc:call(Node, rabbit_amqqueue, lookup, - [rabbit_misc:r(<<"/">>, queue, QName)]), - Q. 
- -await_new_pid(Node, QName, OldPid) -> - case queue_pid(Node, QName) of - OldPid -> timer:sleep(10), - await_new_pid(Node, QName, OldPid); - New -> New - end. - -assert_message_count(Count, Ch, QName) -> - #'queue.declare_ok'{message_count = Count} = - amqp_channel:call(Ch, #'queue.declare'{queue = QName, - passive = true}). - -assert_slave_count(Count, Node, QName) -> - Q = lookup(Node, QName), - [{_, Pids}] = rpc:call(Node, rabbit_amqqueue, info, [Q, [slave_pids]]), - RealCount = case Pids of - '' -> 0; - _ -> length(Pids) - end, - case RealCount of - Count -> - ok; - _ when RealCount < Count -> - timer:sleep(10), - assert_slave_count(Count, Node, QName); - _ -> - exit({too_many_slaves, Count, RealCount}) - end. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/dynamic_ha.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/dynamic_ha.erl deleted file mode 100644 index e9acb52..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/dynamic_ha.erl +++ /dev/null @@ -1,254 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(dynamic_ha). - -%% rabbit_tests:test_dynamic_mirroring() is a unit test which should -%% test the logic of what all the policies decide to do, so we don't -%% need to exhaustively test that here. 
What we need to test is that: -%% -%% * Going from non-mirrored to mirrored works and vice versa -%% * Changing policy can add / remove mirrors and change the master -%% * Adding a node will create a new mirror when there are not enough nodes -%% for the policy -%% * Removing a node will not create a new mirror even if the policy -%% logic wants it (since this gives us a good way to lose messages -%% on cluster shutdown, by repeated failover to new nodes) -%% -%% The first two are change_policy, the last two are change_cluster - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --define(QNAME, <<"ha.test">>). --define(POLICY, <<"^ha.test$">>). %% " emacs --define(VHOST, <<"/">>). - --import(rabbit_test_util, [set_ha_policy/3, set_ha_policy/4, - clear_policy/2, a2b/1, publish/3, consume/3]). --import(rabbit_misc, [pget/2]). - -change_policy_with() -> cluster_abc. -change_policy([CfgA, _CfgB, _CfgC] = Cfgs) -> - ACh = pget(channel, CfgA), - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - - %% When we first declare a queue with no policy, it's not HA. 
- amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME}), - assert_slaves(A, ?QNAME, {A, ''}), - - %% Give it policy "all", it becomes HA and gets all mirrors - set_ha_policy(CfgA, ?POLICY, <<"all">>), - assert_slaves(A, ?QNAME, {A, [B, C]}), - - %% Give it policy "nodes", it gets specific mirrors - set_ha_policy(CfgA, ?POLICY, {<<"nodes">>, [a2b(A), a2b(B)]}), - assert_slaves(A, ?QNAME, {A, [B]}), - - %% Now explicitly change the mirrors - set_ha_policy(CfgA, ?POLICY, {<<"nodes">>, [a2b(A), a2b(C)]}), - assert_slaves(A, ?QNAME, {A, [C]}, [{A, [B, C]}]), - - %% Clear the policy, and we go back to non-mirrored - clear_policy(CfgA, ?POLICY), - assert_slaves(A, ?QNAME, {A, ''}), - - %% Test switching "away" from an unmirrored node - set_ha_policy(CfgA, ?POLICY, {<<"nodes">>, [a2b(B), a2b(C)]}), - assert_slaves(A, ?QNAME, {A, [B, C]}, [{A, [B]}, {A, [C]}]), - - ok. - -change_cluster_with() -> cluster_abc. -change_cluster([CfgA, _CfgB, _CfgC] = CfgsABC) -> - ACh = pget(channel, CfgA), - [A, B, C] = [pget(node, Cfg) || Cfg <- CfgsABC], - - amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME}), - assert_slaves(A, ?QNAME, {A, ''}), - - %% Give it policy exactly 4, it should mirror to all 3 nodes - set_ha_policy(CfgA, ?POLICY, {<<"exactly">>, 4}), - assert_slaves(A, ?QNAME, {A, [B, C]}), - - %% Add D and E, D joins in - [CfgD, CfgE] = CfgsDE = rabbit_test_configs:start_nodes(CfgA, [d, e], 5675), - D = pget(node, CfgD), - E = pget(node, CfgE), - rabbit_test_configs:add_to_cluster(CfgsABC, CfgsDE), - assert_slaves(A, ?QNAME, {A, [B, C, D]}), - - %% Remove D, E joins in - rabbit_test_configs:stop_node(CfgD), - assert_slaves(A, ?QNAME, {A, [B, C, E]}), - - %% Clean up since we started this by hand - rabbit_test_configs:stop_node(CfgE), - ok. - -rapid_change_with() -> cluster_abc. 
-rapid_change([CfgA, _CfgB, _CfgC]) -> - ACh = pget(channel, CfgA), - {_Pid, MRef} = spawn_monitor( - fun() -> - [rapid_amqp_ops(ACh, I) || I <- lists:seq(1, 100)] - end), - rapid_loop(CfgA, MRef), - ok. - -rapid_amqp_ops(Ch, I) -> - Payload = list_to_binary(integer_to_list(I)), - amqp_channel:call(Ch, #'queue.declare'{queue = ?QNAME}), - amqp_channel:cast(Ch, #'basic.publish'{exchange = <<"">>, - routing_key = ?QNAME}, - #amqp_msg{payload = Payload}), - amqp_channel:subscribe(Ch, #'basic.consume'{queue = ?QNAME, - no_ack = true}, self()), - receive #'basic.consume_ok'{} -> ok - end, - receive {#'basic.deliver'{}, #amqp_msg{payload = Payload}} -> - ok - end, - amqp_channel:call(Ch, #'queue.delete'{queue = ?QNAME}). - -rapid_loop(Cfg, MRef) -> - receive - {'DOWN', MRef, process, _Pid, normal} -> - ok; - {'DOWN', MRef, process, _Pid, Reason} -> - exit({amqp_ops_died, Reason}) - after 0 -> - set_ha_policy(Cfg, ?POLICY, <<"all">>), - clear_policy(Cfg, ?POLICY), - rapid_loop(Cfg, MRef) - end. - -%% Vhost deletion needs to successfully tear down policies and queues -%% with policies. At least smoke-test that it doesn't blow up. -vhost_deletion_with() -> [cluster_ab, ha_policy_all]. -vhost_deletion([CfgA, _CfgB]) -> - ACh = pget(channel, CfgA), - Node = pget(node, CfgA), - amqp_channel:call(ACh, #'queue.declare'{queue = <<"test">>}), - ok = rpc:call(Node, rabbit_vhost, delete, [<<"/">>]), - ok. - -promote_on_shutdown_with() -> cluster_ab. 
-promote_on_shutdown([CfgA, CfgB]) -> - set_ha_policy(CfgA, <<"^ha.promote">>, <<"all">>, - [{<<"ha-promote-on-shutdown">>, <<"always">>}]), - set_ha_policy(CfgA, <<"^ha.nopromote">>, <<"all">>), - - ACh = pget(channel, CfgA), - [begin - amqp_channel:call(ACh, #'queue.declare'{queue = Q, - durable = true}), - publish(ACh, Q, 10) - end || Q <- [<<"ha.promote.test">>, <<"ha.nopromote.test">>]], - rabbit_test_configs:restart_node(CfgB), - CfgA1 = rabbit_test_configs:stop_node(CfgA), - {_, BCh} = rabbit_test_util:connect(CfgB), - #'queue.declare_ok'{message_count = 0} = - amqp_channel:call( - BCh, #'queue.declare'{queue = <<"ha.promote.test">>, - durable = true}), - ?assertExit( - {{shutdown, {server_initiated_close, 404, _}}, _}, - amqp_channel:call( - BCh, #'queue.declare'{queue = <<"ha.nopromote.test">>, - durable = true})), - CfgA2 = rabbit_test_configs:start_node(CfgA1), - {_, ACh2} = rabbit_test_util:connect(CfgA2), - #'queue.declare_ok'{message_count = 10} = - amqp_channel:call( - ACh2, #'queue.declare'{queue = <<"ha.nopromote.test">>, - durable = true}), - ok. - -%%---------------------------------------------------------------------------- - -assert_slaves(RPCNode, QName, Exp) -> - assert_slaves(RPCNode, QName, Exp, []). - -assert_slaves(RPCNode, QName, Exp, PermittedIntermediate) -> - assert_slaves0(RPCNode, QName, Exp, - [{get(previous_exp_m_node), get(previous_exp_s_nodes)} | - PermittedIntermediate]). 
- -assert_slaves0(RPCNode, QName, {ExpMNode, ExpSNodes}, PermittedIntermediate) -> - Q = find_queue(QName, RPCNode), - Pid = proplists:get_value(pid, Q), - SPids = proplists:get_value(slave_pids, Q), - ActMNode = node(Pid), - ActSNodes = case SPids of - '' -> ''; - _ -> [node(SPid) || SPid <- SPids] - end, - case ExpMNode =:= ActMNode andalso equal_list(ExpSNodes, ActSNodes) of - false -> - %% It's an async change, so if nothing has changed let's - %% just wait - of course this means if something does not - %% change when expected then we time out the test which is - %% a bit tedious - case [found || {PermMNode, PermSNodes} <- PermittedIntermediate, - PermMNode =:= ActMNode, - equal_list(PermSNodes, ActSNodes)] of - [] -> ct:fail("Expected ~p / ~p, got ~p / ~p~nat ~p~n", - [ExpMNode, ExpSNodes, ActMNode, ActSNodes, - get_stacktrace()]); - _ -> timer:sleep(100), - assert_slaves0(RPCNode, QName, {ExpMNode, ExpSNodes}, - PermittedIntermediate) - end; - true -> - put(previous_exp_m_node, ExpMNode), - put(previous_exp_s_nodes, ExpSNodes), - ok - end. - -equal_list('', '') -> true; -equal_list('', _Act) -> false; -equal_list(_Exp, '') -> false; -equal_list([], []) -> true; -equal_list(_Exp, []) -> false; -equal_list([], _Act) -> false; -equal_list([H|T], Act) -> case lists:member(H, Act) of - true -> equal_list(T, Act -- [H]); - false -> false - end. - -find_queue(QName, RPCNode) -> - Qs = rpc:call(RPCNode, rabbit_amqqueue, info_all, [?VHOST], infinity), - case find_queue0(QName, Qs) of - did_not_find_queue -> timer:sleep(100), - find_queue(QName, RPCNode); - Q -> Q - end. - -find_queue0(QName, Qs) -> - case [Q || Q <- Qs, proplists:get_value(name, Q) =:= - rabbit_misc:r(?VHOST, queue, QName)] of - [R] -> R; - [] -> did_not_find_queue - end. - -get_stacktrace() -> - try - throw(e) - catch - _:e -> - erlang:get_stacktrace() - end. 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/eager_sync.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/eager_sync.erl deleted file mode 100644 index 9c2d935..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/eager_sync.erl +++ /dev/null @@ -1,205 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(eager_sync). - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --define(QNAME, <<"ha.two.test">>). --define(QNAME_AUTO, <<"ha.auto.test">>). --define(MESSAGE_COUNT, 2000). - --import(rabbit_test_util, [a2b/1, publish/3, consume/3, fetch/3]). --import(rabbit_misc, [pget/2]). - --define(CONFIG, [cluster_abc, ha_policy_two_pos]). - -eager_sync_with() -> ?CONFIG. -eager_sync([A, B, C]) -> - %% Queue is on AB but not C. 
- ACh = pget(channel, A), - Ch = pget(channel, C), - amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME, - durable = true}), - - %% Don't sync, lose messages - publish(Ch, ?QNAME, ?MESSAGE_COUNT), - restart(A), - restart(B), - consume(Ch, ?QNAME, 0), - - %% Sync, keep messages - publish(Ch, ?QNAME, ?MESSAGE_COUNT), - restart(A), - ok = sync(C, ?QNAME), - restart(B), - consume(Ch, ?QNAME, ?MESSAGE_COUNT), - - %% Check the no-need-to-sync path - publish(Ch, ?QNAME, ?MESSAGE_COUNT), - ok = sync(C, ?QNAME), - consume(Ch, ?QNAME, ?MESSAGE_COUNT), - - %% keep unacknowledged messages - publish(Ch, ?QNAME, ?MESSAGE_COUNT), - fetch(Ch, ?QNAME, 2), - restart(A), - fetch(Ch, ?QNAME, 3), - sync(C, ?QNAME), - restart(B), - consume(Ch, ?QNAME, ?MESSAGE_COUNT), - - ok. - -eager_sync_cancel_with() -> ?CONFIG. -eager_sync_cancel([A, B, C]) -> - %% Queue is on AB but not C. - ACh = pget(channel, A), - Ch = pget(channel, C), - - amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME, - durable = true}), - {ok, not_syncing} = sync_cancel(C, ?QNAME), %% Idempotence - eager_sync_cancel_test2(A, B, C, Ch). - -eager_sync_cancel_test2(A, B, C, Ch) -> - %% Sync then cancel - publish(Ch, ?QNAME, ?MESSAGE_COUNT), - restart(A), - spawn_link(fun() -> ok = sync_nowait(C, ?QNAME) end), - case wait_for_syncing(C, ?QNAME, 1) of - ok -> - case sync_cancel(C, ?QNAME) of - ok -> - wait_for_running(C, ?QNAME), - restart(B), - consume(Ch, ?QNAME, 0), - - {ok, not_syncing} = sync_cancel(C, ?QNAME), %% Idempotence - ok; - {ok, not_syncing} -> - %% Damn. Syncing finished between wait_for_syncing/3 and - %% sync_cancel/2 above. Start again. - amqp_channel:call(Ch, #'queue.purge'{queue = ?QNAME}), - eager_sync_cancel_test2(A, B, C, Ch) - end; - synced_already -> - %% Damn. Syncing finished before wait_for_syncing/3. Start again. - amqp_channel:call(Ch, #'queue.purge'{queue = ?QNAME}), - eager_sync_cancel_test2(A, B, C, Ch) - end. - -eager_sync_auto_with() -> ?CONFIG. 
-eager_sync_auto([A, B, C]) -> - ACh = pget(channel, A), - Ch = pget(channel, C), - amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME_AUTO, - durable = true}), - - %% Sync automatically, don't lose messages - publish(Ch, ?QNAME_AUTO, ?MESSAGE_COUNT), - restart(A), - wait_for_sync(C, ?QNAME_AUTO), - restart(B), - wait_for_sync(C, ?QNAME_AUTO), - consume(Ch, ?QNAME_AUTO, ?MESSAGE_COUNT), - - ok. - -eager_sync_auto_on_policy_change_with() -> ?CONFIG. -eager_sync_auto_on_policy_change([A, B, C]) -> - ACh = pget(channel, A), - Ch = pget(channel, C), - amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME, - durable = true}), - - %% Sync automatically once the policy is changed to tell us to. - publish(Ch, ?QNAME, ?MESSAGE_COUNT), - restart(A), - Params = [a2b(pget(node, Cfg)) || Cfg <- [A, B]], - rabbit_test_util:set_ha_policy( - A, <<"^ha.two.">>, {<<"nodes">>, Params}, - [{<<"ha-sync-mode">>, <<"automatic">>}]), - wait_for_sync(C, ?QNAME), - - ok. - -eager_sync_requeue_with() -> ?CONFIG. -eager_sync_requeue([A, B, C]) -> - %% Queue is on AB but not C. - ACh = pget(channel, A), - Ch = pget(channel, C), - amqp_channel:call(ACh, #'queue.declare'{queue = ?QNAME, - durable = true}), - - publish(Ch, ?QNAME, 2), - {#'basic.get_ok'{delivery_tag = TagA}, _} = - amqp_channel:call(Ch, #'basic.get'{queue = ?QNAME}), - {#'basic.get_ok'{delivery_tag = TagB}, _} = - amqp_channel:call(Ch, #'basic.get'{queue = ?QNAME}), - amqp_channel:cast(Ch, #'basic.reject'{delivery_tag = TagA, requeue = true}), - restart(B), - ok = sync(C, ?QNAME), - amqp_channel:cast(Ch, #'basic.reject'{delivery_tag = TagB, requeue = true}), - consume(Ch, ?QNAME, 2), - - ok. - -restart(Cfg) -> rabbit_test_util:restart_app(Cfg). - -sync(Cfg, QName) -> - case sync_nowait(Cfg, QName) of - ok -> wait_for_sync(Cfg, QName), - ok; - R -> R - end. - -sync_nowait(Cfg, QName) -> action(Cfg, sync_queue, QName). -sync_cancel(Cfg, QName) -> action(Cfg, cancel_sync_queue, QName). 
- -wait_for_sync(Cfg, QName) -> - sync_detection:wait_for_sync_status(true, Cfg, QName). - -action(Cfg, Action, QName) -> - rabbit_test_util:control_action( - Action, Cfg, [binary_to_list(QName)], [{"-p", "/"}]). - -queue(Cfg, QName) -> - QNameRes = rabbit_misc:r(<<"/">>, queue, QName), - {ok, Q} = rpc:call(pget(node, Cfg), rabbit_amqqueue, lookup, [QNameRes]), - Q. - -wait_for_syncing(Cfg, QName, Target) -> - case state(Cfg, QName) of - {{syncing, _}, _} -> ok; - {running, Target} -> synced_already; - _ -> timer:sleep(100), - wait_for_syncing(Cfg, QName, Target) - end. - -wait_for_running(Cfg, QName) -> - case state(Cfg, QName) of - {running, _} -> ok; - _ -> timer:sleep(100), - wait_for_running(Cfg, QName) - end. - -state(Cfg, QName) -> - [{state, State}, {synchronised_slave_pids, Pids}] = - rpc:call(pget(node, Cfg), rabbit_amqqueue, info, - [queue(Cfg, QName), [state, synchronised_slave_pids]]), - {State, length(Pids)}. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/many_node_ha.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/many_node_ha.erl deleted file mode 100644 index 9104d4c..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/many_node_ha.erl +++ /dev/null @@ -1,64 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(many_node_ha). - --compile(export_all). 
--include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --import(rabbit_test_util, [a2b/1]). --import(rabbit_misc, [pget/2]). - -kill_intermediate_with() -> - fun (Cfg) -> rabbit_test_configs:ha_policy_all( - rabbit_test_configs:cluster(Cfg, [a,b,c,d,e,f])) - end. -kill_intermediate([CfgA, CfgB, CfgC, CfgD, CfgE, CfgF]) -> - Msgs = rabbit_test_configs:cover_work_factor(20000, CfgA), - MasterChannel = pget(channel, CfgA), - ConsumerChannel = pget(channel, CfgE), - ProducerChannel = pget(channel, CfgF), - Queue = <<"test">>, - amqp_channel:call(MasterChannel, #'queue.declare'{queue = Queue, - auto_delete = false}), - - %% TODO: this seems *highly* timing dependant - the assumption being - %% that the kill will work quickly enough that there will still be - %% some messages in-flight that we *must* receive despite the intervening - %% node deaths. It would be nice if we could find a means to do this - %% in a way that is not actually timing dependent. - - %% Worse still, it assumes that killing the master will cause a - %% failover to Slave1, and so on. Nope. - - ConsumerPid = rabbit_ha_test_consumer:create(ConsumerChannel, - Queue, self(), false, Msgs), - - ProducerPid = rabbit_ha_test_producer:create(ProducerChannel, - Queue, self(), false, Msgs), - - %% create a killer for the master and the first 3 slaves - [rabbit_test_util:kill_after(Time, Cfg, sigkill) || - {Cfg, Time} <- [{CfgA, 50}, - {CfgB, 50}, - {CfgC, 100}, - {CfgD, 100}]], - - %% verify that the consumer got all msgs, or die, or time out - rabbit_ha_test_producer:await_response(ProducerPid), - rabbit_ha_test_consumer:await_response(ConsumerPid), - ok. 
- diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/partitions.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/partitions.erl deleted file mode 100644 index 56b99ca..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/partitions.erl +++ /dev/null @@ -1,370 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(partitions). - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --import(rabbit_misc, [pget/2]). - --define(CONFIG, [start_abc, fun enable_dist_proxy/1, - build_cluster, short_ticktime(1), start_connections]). -%% We set ticktime to 1s and setuptime is 7s so to make sure it -%% passes... --define(DELAY, 8000). - -ignore_with() -> ?CONFIG. -ignore(Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - block_unblock([{A, B}, {A, C}]), - timer:sleep(?DELAY), - [B, C] = partitions(A), - [A] = partitions(B), - [A] = partitions(C), - ok. - -pause_minority_on_down_with() -> ?CONFIG. -pause_minority_on_down([CfgA, CfgB, CfgC] = Cfgs) -> - A = pget(node, CfgA), - set_mode(Cfgs, pause_minority), - true = is_running(A), - - rabbit_test_util:kill(CfgB, sigkill), - timer:sleep(?DELAY), - true = is_running(A), - - rabbit_test_util:kill(CfgC, sigkill), - await_running(A, false), - ok. - -pause_minority_on_blocked_with() -> ?CONFIG. 
-pause_minority_on_blocked(Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - set_mode(Cfgs, pause_minority), - pause_on_blocked(A, B, C). - -pause_if_all_down_on_down_with() -> ?CONFIG. -pause_if_all_down_on_down([_, CfgB, CfgC] = Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - set_mode(Cfgs, {pause_if_all_down, [C], ignore}), - [(true = is_running(N)) || N <- [A, B, C]], - - rabbit_test_util:kill(CfgB, sigkill), - timer:sleep(?DELAY), - [(true = is_running(N)) || N <- [A, C]], - - rabbit_test_util:kill(CfgC, sigkill), - timer:sleep(?DELAY), - await_running(A, false), - ok. - -pause_if_all_down_on_blocked_with() -> ?CONFIG. -pause_if_all_down_on_blocked(Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - set_mode(Cfgs, {pause_if_all_down, [C], ignore}), - pause_on_blocked(A, B, C). - -pause_on_blocked(A, B, C) -> - [(true = is_running(N)) || N <- [A, B, C]], - block([{A, B}, {A, C}]), - await_running(A, false), - [await_running(N, true) || N <- [B, C]], - unblock([{A, B}, {A, C}]), - [await_running(N, true) || N <- [A, B, C]], - Status = rpc:call(B, rabbit_mnesia, status, []), - [] = pget(partitions, Status), - ok. - -%% Make sure we do not confirm any messages after a partition has -%% happened but before we pause, since any such confirmations would be -%% lies. -%% -%% This test has to use an AB cluster (not ABC) since GM ends up -%% taking longer to detect down slaves when there are more nodes and -%% we close the window by mistake. -%% -%% In general there are quite a few ways to accidentally cause this -%% test to pass since there are a lot of things in the broker that can -%% suddenly take several seconds to time out when TCP connections -%% won't establish. -pause_minority_false_promises_mirrored_with() -> - [start_ab, fun enable_dist_proxy/1, - build_cluster, short_ticktime(10), start_connections, ha_policy_all]. - -pause_minority_false_promises_mirrored(Cfgs) -> - pause_false_promises(Cfgs, pause_minority). 
- -pause_minority_false_promises_unmirrored_with() -> - [start_ab, fun enable_dist_proxy/1, - build_cluster, short_ticktime(10), start_connections]. - -pause_minority_false_promises_unmirrored(Cfgs) -> - pause_false_promises(Cfgs, pause_minority). - -pause_if_all_down_false_promises_mirrored_with() -> - [start_ab, fun enable_dist_proxy/1, - build_cluster, short_ticktime(10), start_connections, ha_policy_all]. - -pause_if_all_down_false_promises_mirrored([_, CfgB | _] = Cfgs) -> - B = pget(node, CfgB), - pause_false_promises(Cfgs, {pause_if_all_down, [B], ignore}). - -pause_if_all_down_false_promises_unmirrored_with() -> - [start_ab, fun enable_dist_proxy/1, - build_cluster, short_ticktime(10), start_connections]. - -pause_if_all_down_false_promises_unmirrored([_, CfgB | _] = Cfgs) -> - B = pget(node, CfgB), - pause_false_promises(Cfgs, {pause_if_all_down, [B], ignore}). - -pause_false_promises([CfgA, CfgB | _] = Cfgs, ClusterPartitionHandling) -> - [A, B] = [pget(node, Cfg) || Cfg <- Cfgs], - set_mode([CfgA], ClusterPartitionHandling), - ChA = pget(channel, CfgA), - ChB = pget(channel, CfgB), - amqp_channel:call(ChB, #'queue.declare'{queue = <<"test">>, - durable = true}), - amqp_channel:call(ChA, #'confirm.select'{}), - amqp_channel:register_confirm_handler(ChA, self()), - - %% Cause a partition after 1s - Self = self(), - spawn_link(fun () -> - timer:sleep(1000), - %%io:format(user, "~p BLOCK~n", [calendar:local_time()]), - block([{A, B}]), - unlink(Self) - end), - - %% Publish large no of messages, see how many we get confirmed - [amqp_channel:cast(ChA, #'basic.publish'{routing_key = <<"test">>}, - #amqp_msg{props = #'P_basic'{delivery_mode = 1}}) || - _ <- lists:seq(1, 100000)], - %%io:format(user, "~p finish publish~n", [calendar:local_time()]), - - %% Time for the partition to be detected. We don't put this sleep - %% in receive_acks since otherwise we'd have another similar sleep - %% at the end. 
- timer:sleep(30000), - Confirmed = receive_acks(0), - %%io:format(user, "~p got acks~n", [calendar:local_time()]), - await_running(A, false), - %%io:format(user, "~p A stopped~n", [calendar:local_time()]), - - unblock([{A, B}]), - await_running(A, true), - - %% But how many made it onto the rest of the cluster? - #'queue.declare_ok'{message_count = Survived} = - amqp_channel:call(ChB, #'queue.declare'{queue = <<"test">>, - durable = true}), - %%io:format(user, "~p queue declared~n", [calendar:local_time()]), - case Confirmed > Survived of - true -> ?debugVal({Confirmed, Survived}); - false -> ok - end, - ?assert(Confirmed =< Survived), - ok. - -receive_acks(Max) -> - receive - #'basic.ack'{delivery_tag = DTag} -> - receive_acks(DTag) - after ?DELAY -> - Max - end. - -prompt_disconnect_detection_with() -> - [start_ab, fun enable_dist_proxy/1, - build_cluster, short_ticktime(1), start_connections]. - -prompt_disconnect_detection([CfgA, CfgB]) -> - A = pget(node, CfgA), - B = pget(node, CfgB), - ChB = pget(channel, CfgB), - [amqp_channel:call(ChB, #'queue.declare'{}) || _ <- lists:seq(1, 100)], - block([{A, B}]), - timer:sleep(?DELAY), - %% We want to make sure we do not end up waiting for setuptime * - %% no of queues. Unfortunately that means we need a timeout... - [] = rpc(CfgA, rabbit_amqqueue, info_all, [<<"/">>], ?DELAY), - ok. - -ctl_ticktime_sync_with() -> [start_ab, short_ticktime(1)]. -ctl_ticktime_sync([CfgA | _]) -> - %% Server has 1s net_ticktime, make sure ctl doesn't get disconnected - "ok\n" = rabbit_test_configs:rabbitmqctl(CfgA, "eval 'timer:sleep(5000).'"), - ok. - -%% NB: we test full and partial partitions here. -autoheal_with() -> ?CONFIG. -autoheal(Cfgs) -> - set_mode(Cfgs, autoheal), - do_autoheal(Cfgs). - -autoheal_after_pause_if_all_down_with() -> ?CONFIG. 
-autoheal_after_pause_if_all_down([_, CfgB, CfgC | _] = Cfgs) -> - B = pget(node, CfgB), - C = pget(node, CfgC), - set_mode(Cfgs, {pause_if_all_down, [B, C], autoheal}), - do_autoheal(Cfgs). - -do_autoheal(Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - Test = fun (Pairs) -> - block_unblock(Pairs), - %% Sleep to make sure all the partitions are noticed - %% ?DELAY for the net_tick timeout - timer:sleep(?DELAY), - [await_listening(N, true) || N <- [A, B, C]], - [await_partitions(N, []) || N <- [A, B, C]] - end, - Test([{B, C}]), - Test([{A, C}, {B, C}]), - Test([{A, B}, {A, C}, {B, C}]), - ok. - -partial_false_positive_with() -> ?CONFIG. -partial_false_positive(Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - block([{A, B}]), - timer:sleep(1000), - block([{A, C}]), - timer:sleep(?DELAY), - unblock([{A, B}, {A, C}]), - timer:sleep(?DELAY), - %% When B times out A's connection, it will check with C. C will - %% not have timed out A yet, but already it can't talk to it. We - %% need to not consider this a partial partition; B and C should - %% still talk to each other. - [B, C] = partitions(A), - [A] = partitions(B), - [A] = partitions(C), - ok. - -partial_to_full_with() -> ?CONFIG. -partial_to_full(Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - block_unblock([{A, B}]), - timer:sleep(?DELAY), - %% There are several valid ways this could go, depending on how - %% the DOWN messages race: either A gets disconnected first and BC - %% stay together, or B gets disconnected first and AC stay - %% together, or both make it through and all three get - %% disconnected. - case {partitions(A), partitions(B), partitions(C)} of - {[B, C], [A], [A]} -> ok; - {[B], [A, C], [B]} -> ok; - {[B, C], [A, C], [A, B]} -> ok; - Partitions -> exit({partitions, Partitions}) - end. - -partial_pause_minority_with() -> ?CONFIG. 
-partial_pause_minority(Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - set_mode(Cfgs, pause_minority), - block([{A, B}]), - [await_running(N, false) || N <- [A, B]], - await_running(C, true), - unblock([{A, B}]), - [await_listening(N, true) || N <- [A, B, C]], - [await_partitions(N, []) || N <- [A, B, C]], - ok. - -partial_pause_if_all_down_with() -> ?CONFIG. -partial_pause_if_all_down(Cfgs) -> - [A, B, C] = [pget(node, Cfg) || Cfg <- Cfgs], - set_mode(Cfgs, {pause_if_all_down, [B], ignore}), - block([{A, B}]), - await_running(A, false), - [await_running(N, true) || N <- [B, C]], - unblock([{A, B}]), - [await_listening(N, true) || N <- [A, B, C]], - [await_partitions(N, []) || N <- [A, B, C]], - ok. - -set_mode(Cfgs, Mode) -> - [set_env(Cfg, rabbit, cluster_partition_handling, Mode) || Cfg <- Cfgs]. - -set_env(Cfg, App, K, V) -> - rpc(Cfg, application, set_env, [App, K, V]). - -block_unblock(Pairs) -> - block(Pairs), - timer:sleep(?DELAY), - unblock(Pairs). - -block(Pairs) -> [block(X, Y) || {X, Y} <- Pairs]. -unblock(Pairs) -> [allow(X, Y) || {X, Y} <- Pairs]. - -partitions(Node) -> - case rpc:call(Node, rabbit_node_monitor, partitions, []) of - {badrpc, {'EXIT', E}} = R -> case rabbit_misc:is_abnormal_exit(E) of - true -> R; - false -> timer:sleep(1000), - partitions(Node) - end; - Partitions -> Partitions - end. - -block(X, Y) -> - rpc:call(X, inet_tcp_proxy, block, [Y]), - rpc:call(Y, inet_tcp_proxy, block, [X]). - -allow(X, Y) -> - rpc:call(X, inet_tcp_proxy, allow, [Y]), - rpc:call(Y, inet_tcp_proxy, allow, [X]). - -await_running (Node, Bool) -> await(Node, Bool, fun is_running/1). -await_listening (Node, Bool) -> await(Node, Bool, fun is_listening/1). -await_partitions(Node, Parts) -> await(Node, Parts, fun partitions/1). - -await(Node, Res, Fun) -> - case Fun(Node) of - Res -> ok; - _ -> timer:sleep(100), - await(Node, Res, Fun) - end. - -is_running(Node) -> rpc:call(Node, rabbit, is_running, []). 
- -is_listening(Node) -> - case rpc:call(Node, rabbit_networking, node_listeners, [Node]) of - [] -> false; - [_|_] -> true; - _ -> false - end. - -enable_dist_proxy(Cfgs) -> - inet_tcp_proxy_manager:start_link(), - Nodes = [pget(node, Cfg) || Cfg <- Cfgs], - [ok = rpc:call(Node, inet_tcp_proxy, start, []) || Node <- Nodes], - [ok = rpc:call(Node, inet_tcp_proxy, reconnect, [Nodes]) || Node <- Nodes], - Cfgs. - -short_ticktime(Time) -> - fun (Cfgs) -> - [rpc(Cfg, net_kernel, set_net_ticktime, [Time, 0]) || Cfg <- Cfgs], - net_kernel:set_net_ticktime(Time, 0), - Cfgs - end. - -rpc(Cfg, M, F, A) -> - rpc:call(pget(node, Cfg), M, F, A). - -rpc(Cfg, M, F, A, T) -> - rpc:call(pget(node, Cfg), M, F, A, T). diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/rabbit_priority_queue_test.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/rabbit_priority_queue_test.erl deleted file mode 100644 index 44228ff..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/rabbit_priority_queue_test.erl +++ /dev/null @@ -1,335 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% - --module(rabbit_priority_queue_test). - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --import(rabbit_misc, [pget/2]). - -%% The BQ API is used in all sorts of places in all sorts of -%% ways. 
Therefore we have to jump through a few different hoops -%% in order to integration-test it. -%% -%% * start/1, stop/0, init/3, terminate/2, delete_and_terminate/2 -%% - starting and stopping rabbit. durable queues / persistent msgs needed -%% to test recovery -%% -%% * publish/5, drain_confirmed/1, fetch/2, ack/2, is_duplicate/2, msg_rates/1, -%% needs_timeout/1, timeout/1, invoke/3, resume/1 [0] -%% - regular publishing and consuming, with confirms and acks and durability -%% -%% * publish_delivered/4 - publish with acks straight through -%% * discard/3 - publish without acks straight through -%% * dropwhile/2 - expire messages without DLX -%% * fetchwhile/4 - expire messages with DLX -%% * ackfold/4 - reject messages with DLX -%% * requeue/2 - reject messages without DLX -%% * drop/2 - maxlen messages without DLX -%% * purge/1 - issue AMQP queue.purge -%% * purge_acks/1 - mirror queue explicit sync with unacked msgs -%% * fold/3 - mirror queue explicit sync -%% * depth/1 - mirror queue implicit sync detection -%% * len/1, is_empty/1 - info items -%% * handle_pre_hibernate/1 - hibernation -%% -%% * set_ram_duration_target/2, ram_duration/1, status/1 -%% - maybe need unit testing? -%% -%% [0] publish enough to get credit flow from msg store - -recovery_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - declare(Ch, Q, 3), - publish(Ch, Q, [1, 2, 3, 1, 2, 3, 1, 2, 3]), - amqp_connection:close(Conn), - - %% TODO these break coverage - rabbit:stop(), - rabbit:start(), - - {Conn2, Ch2} = open(), - get_all(Ch2, Q, do_ack, [3, 3, 3, 2, 2, 2, 1, 1, 1]), - delete(Ch2, Q), - amqp_connection:close(Conn2), - passed. 
- -simple_order_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - declare(Ch, Q, 3), - publish(Ch, Q, [1, 2, 3, 1, 2, 3, 1, 2, 3]), - get_all(Ch, Q, do_ack, [3, 3, 3, 2, 2, 2, 1, 1, 1]), - publish(Ch, Q, [2, 3, 1, 2, 3, 1, 2, 3, 1]), - get_all(Ch, Q, no_ack, [3, 3, 3, 2, 2, 2, 1, 1, 1]), - publish(Ch, Q, [3, 1, 2, 3, 1, 2, 3, 1, 2]), - get_all(Ch, Q, do_ack, [3, 3, 3, 2, 2, 2, 1, 1, 1]), - delete(Ch, Q), - amqp_connection:close(Conn), - passed. - -matching_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - declare(Ch, Q, 5), - %% We round priority down, and 0 is the default - publish(Ch, Q, [undefined, 0, 5, 10, undefined]), - get_all(Ch, Q, do_ack, [5, 10, undefined, 0, undefined]), - delete(Ch, Q), - amqp_connection:close(Conn), - passed. - -resume_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - declare(Ch, Q, 5), - amqp_channel:call(Ch, #'confirm.select'{}), - publish_many(Ch, Q, 10000), - amqp_channel:wait_for_confirms(Ch), - amqp_channel:call(Ch, #'queue.purge'{queue = Q}), %% Assert it exists - delete(Ch, Q), - amqp_connection:close(Conn), - passed. - -straight_through_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - declare(Ch, Q, 3), - [begin - consume(Ch, Q, Ack), - [begin - publish1(Ch, Q, P), - assert_delivered(Ch, Ack, P) - end || P <- [1, 2, 3]], - cancel(Ch) - end || Ack <- [do_ack, no_ack]], - get_empty(Ch, Q), - delete(Ch, Q), - amqp_connection:close(Conn), - passed. - -dropwhile_fetchwhile_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - [begin - declare(Ch, Q, Args ++ arguments(3)), - publish(Ch, Q, [1, 2, 3, 1, 2, 3, 1, 2, 3]), - timer:sleep(10), - get_empty(Ch, Q), - delete(Ch, Q) - end || - Args <- [[{<<"x-message-ttl">>, long, 1}], - [{<<"x-message-ttl">>, long, 1}, - {<<"x-dead-letter-exchange">>, longstr, <<"amq.fanout">>}] - ]], - amqp_connection:close(Conn), - passed. 
- -ackfold_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - Q2 = <<"test2">>, - declare(Ch, Q, - [{<<"x-dead-letter-exchange">>, longstr, <<>>}, - {<<"x-dead-letter-routing-key">>, longstr, Q2} - | arguments(3)]), - declare(Ch, Q2, none), - publish(Ch, Q, [1, 2, 3]), - [_, _, DTag] = get_all(Ch, Q, manual_ack, [3, 2, 1]), - amqp_channel:cast(Ch, #'basic.nack'{delivery_tag = DTag, - multiple = true, - requeue = false}), - timer:sleep(100), - get_all(Ch, Q2, do_ack, [3, 2, 1]), - delete(Ch, Q), - delete(Ch, Q2), - amqp_connection:close(Conn), - passed. - -requeue_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - declare(Ch, Q, 3), - publish(Ch, Q, [1, 2, 3]), - [_, _, DTag] = get_all(Ch, Q, manual_ack, [3, 2, 1]), - amqp_channel:cast(Ch, #'basic.nack'{delivery_tag = DTag, - multiple = true, - requeue = true}), - get_all(Ch, Q, do_ack, [3, 2, 1]), - delete(Ch, Q), - amqp_connection:close(Conn), - passed. - -drop_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - declare(Ch, Q, [{<<"x-max-length">>, long, 4} | arguments(3)]), - publish(Ch, Q, [1, 2, 3, 1, 2, 3, 1, 2, 3]), - %% We drop from the head, so this is according to the "spec" even - %% if not likely to be what the user wants. - get_all(Ch, Q, do_ack, [2, 1, 1, 1]), - delete(Ch, Q), - amqp_connection:close(Conn), - passed. - -purge_test() -> - {Conn, Ch} = open(), - Q = <<"test">>, - declare(Ch, Q, 3), - publish(Ch, Q, [1, 2, 3]), - amqp_channel:call(Ch, #'queue.purge'{queue = Q}), - get_empty(Ch, Q), - delete(Ch, Q), - amqp_connection:close(Conn), - passed. 
- -ram_duration_test() -> - QName = rabbit_misc:r(<<"/">>, queue, <<"pseudo">>), - Q0 = rabbit_amqqueue:pseudo_queue(QName, self()), - Q = Q0#amqqueue{arguments = [{<<"x-max-priority">>, long, 5}]}, - PQ = rabbit_priority_queue, - BQS1 = PQ:init(Q, new, fun(_, _) -> ok end), - {_Duration1, BQS2} = PQ:ram_duration(BQS1), - BQS3 = PQ:set_ram_duration_target(infinity, BQS2), - BQS4 = PQ:set_ram_duration_target(1, BQS3), - {_Duration2, BQS5} = PQ:ram_duration(BQS4), - PQ:delete_and_terminate(a_whim, BQS5), - passed. - -mirror_queue_sync_with() -> cluster_ab. -mirror_queue_sync([CfgA, _CfgB]) -> - Ch = pget(channel, CfgA), - Q = <<"test">>, - declare(Ch, Q, 3), - publish(Ch, Q, [1, 2, 3]), - ok = rabbit_test_util:set_ha_policy(CfgA, <<".*">>, <<"all">>), - publish(Ch, Q, [1, 2, 3, 1, 2, 3]), - %% master now has 9, slave 6. - get_partial(Ch, Q, manual_ack, [3, 3, 3, 2, 2, 2]), - %% So some but not all are unacked at the slave - rabbit_test_util:control_action(sync_queue, CfgA, [binary_to_list(Q)], - [{"-p", "/"}]), - wait_for_sync(CfgA, rabbit_misc:r(<<"/">>, queue, Q)), - passed. - -%%---------------------------------------------------------------------------- - -open() -> - {ok, Conn} = amqp_connection:start(#amqp_params_network{}), - {ok, Ch} = amqp_connection:open_channel(Conn), - {Conn, Ch}. - -declare(Ch, Q, Args) when is_list(Args) -> - amqp_channel:call(Ch, #'queue.declare'{queue = Q, - durable = true, - arguments = Args}); -declare(Ch, Q, Max) -> - declare(Ch, Q, arguments(Max)). - -delete(Ch, Q) -> - amqp_channel:call(Ch, #'queue.delete'{queue = Q}). - -publish(Ch, Q, Ps) -> - amqp_channel:call(Ch, #'confirm.select'{}), - [publish1(Ch, Q, P) || P <- Ps], - amqp_channel:wait_for_confirms(Ch). - -publish_many(_Ch, _Q, 0) -> ok; -publish_many( Ch, Q, N) -> publish1(Ch, Q, random:uniform(5)), - publish_many(Ch, Q, N - 1). - -publish1(Ch, Q, P) -> - amqp_channel:cast(Ch, #'basic.publish'{routing_key = Q}, - #amqp_msg{props = props(P), - payload = priority2bin(P)}). 
- -props(undefined) -> #'P_basic'{delivery_mode = 2}; -props(P) -> #'P_basic'{priority = P, - delivery_mode = 2}. - -consume(Ch, Q, Ack) -> - amqp_channel:subscribe(Ch, #'basic.consume'{queue = Q, - no_ack = Ack =:= no_ack, - consumer_tag = <<"ctag">>}, - self()), - receive - #'basic.consume_ok'{consumer_tag = <<"ctag">>} -> - ok - end. - -cancel(Ch) -> - amqp_channel:call(Ch, #'basic.cancel'{consumer_tag = <<"ctag">>}). - -assert_delivered(Ch, Ack, P) -> - PBin = priority2bin(P), - receive - {#'basic.deliver'{delivery_tag = DTag}, #amqp_msg{payload = PBin2}} -> - ?assertEqual(PBin, PBin2), - maybe_ack(Ch, Ack, DTag) - end. - -get_all(Ch, Q, Ack, Ps) -> - DTags = get_partial(Ch, Q, Ack, Ps), - get_empty(Ch, Q), - DTags. - -get_partial(Ch, Q, Ack, Ps) -> - [get_ok(Ch, Q, Ack, P) || P <- Ps]. - -get_empty(Ch, Q) -> - #'basic.get_empty'{} = amqp_channel:call(Ch, #'basic.get'{queue = Q}). - -get_ok(Ch, Q, Ack, P) -> - PBin = priority2bin(P), - {#'basic.get_ok'{delivery_tag = DTag}, #amqp_msg{payload = PBin2}} = - amqp_channel:call(Ch, #'basic.get'{queue = Q, - no_ack = Ack =:= no_ack}), - ?assertEqual(PBin, PBin2), - maybe_ack(Ch, Ack, DTag). - -maybe_ack(Ch, do_ack, DTag) -> - amqp_channel:cast(Ch, #'basic.ack'{delivery_tag = DTag}), - DTag; -maybe_ack(_Ch, _, DTag) -> - DTag. - -arguments(none) -> []; -arguments(Max) -> [{<<"x-max-priority">>, byte, Max}]. - -priority2bin(undefined) -> <<"undefined">>; -priority2bin(Int) -> list_to_binary(integer_to_list(Int)). - -%%---------------------------------------------------------------------------- - -wait_for_sync(Cfg, Q) -> - case synced(Cfg, Q) of - true -> ok; - false -> timer:sleep(100), - wait_for_sync(Cfg, Q) - end. - -synced(Cfg, Q) -> - Info = rpc:call(pget(node, Cfg), - rabbit_amqqueue, info_all, - [<<"/">>, [name, synchronised_slave_pids]]), - [SSPids] = [Pids || [{name, Q1}, {synchronised_slave_pids, Pids}] <- Info, - Q =:= Q1], - length(SSPids) =:= 1. 
- -%%---------------------------------------------------------------------------- diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/simple_ha.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/simple_ha.erl deleted file mode 100644 index 389ff23..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/simple_ha.erl +++ /dev/null @@ -1,143 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(simple_ha). - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --import(rabbit_test_util, [set_ha_policy/3, a2b/1]). --import(rabbit_misc, [pget/2]). - --define(CONFIG, [cluster_abc, ha_policy_all]). - -rapid_redeclare_with() -> [cluster_ab, ha_policy_all]. -rapid_redeclare([CfgA | _]) -> - Ch = pget(channel, CfgA), - Queue = <<"test">>, - [begin - amqp_channel:call(Ch, #'queue.declare'{queue = Queue, - durable = true}), - amqp_channel:call(Ch, #'queue.delete'{queue = Queue}) - end || _I <- lists:seq(1, 20)], - ok. - -%% Check that by the time we get a declare-ok back, the slaves are up -%% and in Mnesia. -declare_synchrony_with() -> [cluster_ab, ha_policy_all]. 
-declare_synchrony([Rabbit, Hare]) -> - RabbitCh = pget(channel, Rabbit), - HareCh = pget(channel, Hare), - Q = <<"mirrored-queue">>, - declare(RabbitCh, Q), - amqp_channel:call(RabbitCh, #'confirm.select'{}), - amqp_channel:cast(RabbitCh, #'basic.publish'{routing_key = Q}, - #amqp_msg{props = #'P_basic'{delivery_mode = 2}}), - amqp_channel:wait_for_confirms(RabbitCh), - _Rabbit2 = rabbit_test_configs:kill_node(Rabbit), - - #'queue.declare_ok'{message_count = 1} = declare(HareCh, Q), - ok. - -declare(Ch, Name) -> - amqp_channel:call(Ch, #'queue.declare'{durable = true, queue = Name}). - -consume_survives_stop_with() -> ?CONFIG. -consume_survives_sigkill_with() -> ?CONFIG. -consume_survives_policy_with() -> ?CONFIG. -auto_resume_with() -> ?CONFIG. -auto_resume_no_ccn_client_with() -> ?CONFIG. - -consume_survives_stop(Cf) -> consume_survives(Cf, fun stop/2, true). -consume_survives_sigkill(Cf) -> consume_survives(Cf, fun sigkill/2, true). -consume_survives_policy(Cf) -> consume_survives(Cf, fun policy/2, true). -auto_resume(Cf) -> consume_survives(Cf, fun sigkill/2, false). -auto_resume_no_ccn_client(Cf) -> consume_survives(Cf, fun sigkill/2, false, - false). - -confirms_survive_stop_with() -> ?CONFIG. -confirms_survive_sigkill_with() -> ?CONFIG. -confirms_survive_policy_with() -> ?CONFIG. - -confirms_survive_stop(Cf) -> confirms_survive(Cf, fun stop/2). -confirms_survive_sigkill(Cf) -> confirms_survive(Cf, fun sigkill/2). -confirms_survive_policy(Cf) -> confirms_survive(Cf, fun policy/2). - -%%---------------------------------------------------------------------------- - -consume_survives(Nodes, DeathFun, CancelOnFailover) -> - consume_survives(Nodes, DeathFun, CancelOnFailover, true). 
- -consume_survives([CfgA, CfgB, CfgC] = Nodes, - DeathFun, CancelOnFailover, CCNSupported) -> - Msgs = rabbit_test_configs:cover_work_factor(20000, CfgA), - Channel1 = pget(channel, CfgA), - Channel2 = pget(channel, CfgB), - Channel3 = pget(channel, CfgC), - - %% declare the queue on the master, mirrored to the two slaves - Queue = <<"test">>, - amqp_channel:call(Channel1, #'queue.declare'{queue = Queue, - auto_delete = false}), - - %% start up a consumer - ConsCh = case CCNSupported of - true -> Channel2; - false -> open_incapable_channel(pget(port, CfgB)) - end, - ConsumerPid = rabbit_ha_test_consumer:create( - ConsCh, Queue, self(), CancelOnFailover, Msgs), - - %% send a bunch of messages from the producer - ProducerPid = rabbit_ha_test_producer:create(Channel3, Queue, - self(), false, Msgs), - DeathFun(CfgA, Nodes), - %% verify that the consumer got all msgs, or die - the await_response - %% calls throw an exception if anything goes wrong.... - rabbit_ha_test_consumer:await_response(ConsumerPid), - rabbit_ha_test_producer:await_response(ProducerPid), - ok. - -confirms_survive([CfgA, CfgB, _CfgC] = Nodes, DeathFun) -> - Msgs = rabbit_test_configs:cover_work_factor(20000, CfgA), - Node1Channel = pget(channel, CfgA), - Node2Channel = pget(channel, CfgB), - - %% declare the queue on the master, mirrored to the two slaves - Queue = <<"test">>, - amqp_channel:call(Node1Channel,#'queue.declare'{queue = Queue, - auto_delete = false, - durable = true}), - - %% send a bunch of messages from the producer - ProducerPid = rabbit_ha_test_producer:create(Node2Channel, Queue, - self(), true, Msgs), - DeathFun(CfgA, Nodes), - rabbit_ha_test_producer:await_response(ProducerPid), - ok. - -stop(Cfg, _Cfgs) -> rabbit_test_util:kill_after(50, Cfg, stop). -sigkill(Cfg, _Cfgs) -> rabbit_test_util:kill_after(50, Cfg, sigkill). -policy(Cfg, [_|T]) -> Nodes = [a2b(pget(node, C)) || C <- T], - set_ha_policy(Cfg, <<".*">>, {<<"nodes">>, Nodes}). 
- -open_incapable_channel(NodePort) -> - Props = [{<<"capabilities">>, table, []}], - {ok, ConsConn} = - amqp_connection:start(#amqp_params_network{port = NodePort, - client_properties = Props}), - {ok, Ch} = amqp_connection:open_channel(ConsConn), - Ch. diff --git a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/sync_detection.erl b/rabbitmq-server/plugins-src/rabbitmq-test/test/src/sync_detection.erl deleted file mode 100644 index 18f6f5d..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-test/test/src/sync_detection.erl +++ /dev/null @@ -1,189 +0,0 @@ -%% The contents of this file are subject to the Mozilla Public License -%% Version 1.1 (the "License"); you may not use this file except in -%% compliance with the License. You may obtain a copy of the License -%% at http://www.mozilla.org/MPL/ -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and -%% limitations under the License. -%% -%% The Original Code is RabbitMQ. -%% -%% The Initial Developer of the Original Code is GoPivotal, Inc. -%% Copyright (c) 2007-2014 GoPivotal, Inc. All rights reserved. -%% --module(sync_detection). - --compile(export_all). --include_lib("eunit/include/eunit.hrl"). --include_lib("amqp_client/include/amqp_client.hrl"). - --import(rabbit_test_util, [stop_app/1, start_app/1]). --import(rabbit_misc, [pget/2]). - --define(LOOP_RECURSION_DELAY, 100). - -slave_synchronization_with() -> [cluster_ab, ha_policy_two_pos]. -slave_synchronization([Master, Slave]) -> - Channel = pget(channel, Master), - Queue = <<"ha.two.test">>, - #'queue.declare_ok'{} = - amqp_channel:call(Channel, #'queue.declare'{queue = Queue, - auto_delete = false}), - - %% The comments on the right are the queue length and the pending acks on - %% the master. 
- stop_app(Slave), - - %% We get and ack one message when the slave is down, and check that when we - %% start the slave it's not marked as synced until ack the message. We also - %% publish another message when the slave is up. - send_dummy_message(Channel, Queue), % 1 - 0 - {#'basic.get_ok'{delivery_tag = Tag1}, _} = - amqp_channel:call(Channel, #'basic.get'{queue = Queue}), % 0 - 1 - - start_app(Slave), - - slave_unsynced(Master, Queue), - send_dummy_message(Channel, Queue), % 1 - 1 - slave_unsynced(Master, Queue), - - amqp_channel:cast(Channel, #'basic.ack'{delivery_tag = Tag1}), % 1 - 0 - - slave_synced(Master, Queue), - - %% We restart the slave and we send a message, so that the slave will only - %% have one of the messages. - stop_app(Slave), - start_app(Slave), - - send_dummy_message(Channel, Queue), % 2 - 0 - - slave_unsynced(Master, Queue), - - %% We reject the message that the slave doesn't have, and verify that it's - %% still unsynced - {#'basic.get_ok'{delivery_tag = Tag2}, _} = - amqp_channel:call(Channel, #'basic.get'{queue = Queue}), % 1 - 1 - slave_unsynced(Master, Queue), - amqp_channel:cast(Channel, #'basic.reject'{ delivery_tag = Tag2, - requeue = true }), % 2 - 0 - slave_unsynced(Master, Queue), - {#'basic.get_ok'{delivery_tag = Tag3}, _} = - amqp_channel:call(Channel, #'basic.get'{queue = Queue}), % 1 - 1 - amqp_channel:cast(Channel, #'basic.ack'{delivery_tag = Tag3}), % 1 - 0 - slave_synced(Master, Queue), - {#'basic.get_ok'{delivery_tag = Tag4}, _} = - amqp_channel:call(Channel, #'basic.get'{queue = Queue}), % 0 - 1 - amqp_channel:cast(Channel, #'basic.ack'{delivery_tag = Tag4}), % 0 - 0 - slave_synced(Master, Queue). - -slave_synchronization_ttl_with() -> [cluster_abc, ha_policy_two_pos]. 
-slave_synchronization_ttl([Master, Slave, DLX]) -> - Channel = pget(channel, Master), - DLXChannel = pget(channel, DLX), - - %% We declare a DLX queue to wait for messages to be TTL'ed - DLXQueue = <<"dlx-queue">>, - #'queue.declare_ok'{} = - amqp_channel:call(Channel, #'queue.declare'{queue = DLXQueue, - auto_delete = false}), - - TestMsgTTL = 5000, - Queue = <<"ha.two.test">>, - %% Sadly we need fairly high numbers for the TTL because starting/stopping - %% nodes takes a fair amount of time. - Args = [{<<"x-message-ttl">>, long, TestMsgTTL}, - {<<"x-dead-letter-exchange">>, longstr, <<>>}, - {<<"x-dead-letter-routing-key">>, longstr, DLXQueue}], - #'queue.declare_ok'{} = - amqp_channel:call(Channel, #'queue.declare'{queue = Queue, - auto_delete = false, - arguments = Args}), - - slave_synced(Master, Queue), - - %% All unknown - stop_app(Slave), - send_dummy_message(Channel, Queue), - send_dummy_message(Channel, Queue), - start_app(Slave), - slave_unsynced(Master, Queue), - wait_for_messages(DLXQueue, DLXChannel, 2), - slave_synced(Master, Queue), - - %% 1 unknown, 1 known - stop_app(Slave), - send_dummy_message(Channel, Queue), - start_app(Slave), - slave_unsynced(Master, Queue), - send_dummy_message(Channel, Queue), - slave_unsynced(Master, Queue), - wait_for_messages(DLXQueue, DLXChannel, 2), - slave_synced(Master, Queue), - - %% %% both known - send_dummy_message(Channel, Queue), - send_dummy_message(Channel, Queue), - slave_synced(Master, Queue), - wait_for_messages(DLXQueue, DLXChannel, 2), - slave_synced(Master, Queue), - - ok. - -send_dummy_message(Channel, Queue) -> - Payload = <<"foo">>, - Publish = #'basic.publish'{exchange = <<>>, routing_key = Queue}, - amqp_channel:cast(Channel, Publish, #amqp_msg{payload = Payload}). 
- -slave_pids(Node, Queue) -> - {ok, Q} = rpc:call(Node, rabbit_amqqueue, lookup, - [rabbit_misc:r(<<"/">>, queue, Queue)]), - SSP = synchronised_slave_pids, - [{SSP, Pids}] = rpc:call(Node, rabbit_amqqueue, info, [Q, [SSP]]), - case Pids of - '' -> []; - _ -> Pids - end. - -%% The mnesia syncronization takes a while, but we don't want to wait for the -%% test to fail, since the timetrap is quite high. -wait_for_sync_status(Status, Cfg, Queue) -> - Max = 10000 / ?LOOP_RECURSION_DELAY, - wait_for_sync_status(0, Max, Status, pget(node, Cfg), Queue). - -wait_for_sync_status(N, Max, Status, Node, Queue) when N >= Max -> - erlang:error({sync_status_max_tries_failed, - [{queue, Queue}, - {node, Node}, - {expected_status, Status}, - {max_tried, Max}]}); -wait_for_sync_status(N, Max, Status, Node, Queue) -> - Synced = length(slave_pids(Node, Queue)) =:= 1, - case Synced =:= Status of - true -> ok; - false -> timer:sleep(?LOOP_RECURSION_DELAY), - wait_for_sync_status(N + 1, Max, Status, Node, Queue) - end. - -slave_synced(Cfg, Queue) -> - wait_for_sync_status(true, Cfg, Queue). - -slave_unsynced(Cfg, Queue) -> - wait_for_sync_status(false, Cfg, Queue). - -wait_for_messages(Queue, Channel, N) -> - Sub = #'basic.consume'{queue = Queue}, - #'basic.consume_ok'{consumer_tag = CTag} = amqp_channel:call(Channel, Sub), - receive - #'basic.consume_ok'{} -> ok - end, - lists:foreach( - fun (_) -> receive - {#'basic.deliver'{delivery_tag = Tag}, _Content} -> - amqp_channel:cast(Channel, - #'basic.ack'{delivery_tag = Tag}) - end - end, lists:seq(1, N)), - amqp_channel:call(Channel, #'basic.cancel'{consumer_tag = CTag}). 
diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-tracing/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/Makefile b/rabbitmq-server/plugins-src/rabbitmq-tracing/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-tracing/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/README b/rabbitmq-server/plugins-src/rabbitmq-tracing/README deleted file mode 100644 index 61b5ae4..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-tracing/README +++ /dev/null @@ -1,42 +0,0 @@ -An opinionated tracing plugin for RabbitMQ management. Build it like -any other plugin. After installation you should see a "Tracing" tab in -the management UI. Hopefully use is obvious. - -Configuration -============= - -There is one configuration option: - -directory: This controls where the log files go. It defaults to -"/var/tmp/rabbitmq-tracing". - -Performance -=========== - -On my workstation, rabbitmq-tracing can write about 2000 msg/s to a -log file. You should be careful using rabbitmq-tracing if you think -you're going to capture more messages than this. Of course, any -messages that can't be logged are queued. - -The code to serve up the log files over HTTP is pretty dumb, it loads -the whole log into memory. If you have large log files you may wish -to transfer them off the server in some other way. - -HTTP API -======== - -GET /api/traces -GET /api/traces/ -GET PUT DELETE /api/traces// -GET /api/trace-files -GET DELETE /api/trace-files/ (GET returns the file as text/plain, - not JSON describing it.) 
- -Example for how to create a trace: - -$ curl -i -u guest:guest -H "content-type:application/json" -XPUT \ - http://localhost:55672/api/traces/%2f/my-trace \ - -d'{"format":"text","pattern":"#", "max_payload_bytes":1000}' - -max_payload_bytes is optional (omit it to prevent payload truncation), -format and pattern are mandatory. \ No newline at end of file diff --git a/rabbitmq-server/plugins-src/rabbitmq-tracing/package.mk b/rabbitmq-server/plugins-src/rabbitmq-tracing/package.mk deleted file mode 100644 index 58341bb..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-tracing/package.mk +++ /dev/null @@ -1,8 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-management -WITH_BROKER_TEST_COMMANDS:=eunit:test(rabbit_tracing_test,[verbose]) - -CONSTRUCT_APP_PREREQS:=$(shell find $(PACKAGE_DIR)/priv -type f) -define construct_app_commands - cp -r $(PACKAGE_DIR)/priv $(APP_DIR) -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/CONTRIBUTING.md deleted file mode 100644 index 69a4b4a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/CONTRIBUTING.md +++ /dev/null @@ -1,51 +0,0 @@ -## Overview - -RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions. -Pull requests is the primary place of discussing code changes. 
- -## How to Contribute - -The process is fairly standard: - - * Fork the repository or repositories you plan on contributing to - * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella) - * `cd umbrella`, `make co` - * Create a branch with a descriptive name in the relevant repositories - * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork - * Submit pull requests with an explanation what has been changed and **why** - * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below) - * Be patient. We will get to your pull request eventually - -If what you are going to work on is a substantial change, please first ask the core team -of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users). - - -## (Brief) Code of Conduct - -In one line: don't be a dick. - -Be respectful to the maintainers and other contributors. Open source -contributors put long hours into developing projects and doing user -support. Those projects and user support are available for free. We -believe this deserves some respect. - -Be respectful to people of all races, genders, religious beliefs and -political views. Regardless of how brilliant a pull request is -technically, we will not tolerate disrespectful or aggressive -behaviour. - -Contributors who violate this straightforward Code of Conduct will see -their pull requests closed and locked. - - -## Contributor Agreement - -If you want to contribute a non-trivial change, please submit a signed copy of our -[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time -you submit your pull request. This will make it much easier (in some cases, possible) -for the RabbitMQ team at Pivotal to merge your contribution. 
- - -## Where to Ask Questions - -If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users). diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/Makefile b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/package.mk b/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/package.mk deleted file mode 100644 index d5913ca..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-web-dispatch/package.mk +++ /dev/null @@ -1,3 +0,0 @@ -DEPS:=mochiweb-wrapper webmachine-wrapper -WITH_BROKER_TEST_COMMANDS:=rabbit_web_dispatch_test:test() -STANDALONE_TEST_COMMANDS:=rabbit_web_dispatch_test_unit:test() diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/.srcdist_done b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/.srcdist_done deleted file mode 100644 index e69de29..0000000 diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/CONTRIBUTING.md b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/CONTRIBUTING.md deleted file mode 100644 index 69a4b4a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/CONTRIBUTING.md +++ /dev/null @@ -1,51 +0,0 @@ -## Overview - -RabbitMQ projects use pull requests to discuss, collaborate on and accept code contributions. -Pull requests is the primary place of discussing code changes. 
- -## How to Contribute - -The process is fairly standard: - - * Fork the repository or repositories you plan on contributing to - * Clone [RabbitMQ umbrella repository](https://github.com/rabbitmq/rabbitmq-public-umbrella) - * `cd umbrella`, `make co` - * Create a branch with a descriptive name in the relevant repositories - * Make your changes, run tests, commit with a [descriptive message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html), push to your fork - * Submit pull requests with an explanation what has been changed and **why** - * Submit a filled out and signed [Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) if needed (see below) - * Be patient. We will get to your pull request eventually - -If what you are going to work on is a substantial change, please first ask the core team -of their opinion on [RabbitMQ mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users). - - -## (Brief) Code of Conduct - -In one line: don't be a dick. - -Be respectful to the maintainers and other contributors. Open source -contributors put long hours into developing projects and doing user -support. Those projects and user support are available for free. We -believe this deserves some respect. - -Be respectful to people of all races, genders, religious beliefs and -political views. Regardless of how brilliant a pull request is -technically, we will not tolerate disrespectful or aggressive -behaviour. - -Contributors who violate this straightforward Code of Conduct will see -their pull requests closed and locked. - - -## Contributor Agreement - -If you want to contribute a non-trivial change, please submit a signed copy of our -[Contributor Agreement](https://github.com/rabbitmq/ca#how-to-submit) around the time -you submit your pull request. This will make it much easier (in some cases, possible) -for the RabbitMQ team at Pivotal to merge your contribution. 
- - -## Where to Ask Questions - -If something isn't clear, feel free to ask on our [mailing list](https://groups.google.com/forum/#!forum/rabbitmq-users). diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/Makefile b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/Makefile deleted file mode 100644 index 482105a..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/Makefile +++ /dev/null @@ -1 +0,0 @@ -include ../umbrella.mk diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/package.mk b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/package.mk deleted file mode 100644 index 01e3b9d..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/package.mk +++ /dev/null @@ -1,6 +0,0 @@ -RELEASABLE:=true -DEPS:=rabbitmq-web-dispatch rabbitmq-web-stomp rabbitmq-server - -define construct_app_commands - cp -r $(PACKAGE_DIR)/priv $(APP_DIR) -endef diff --git a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/sockjs-0.3.js b/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/sockjs-0.3.js deleted file mode 100644 index 585215c..0000000 --- a/rabbitmq-server/plugins-src/rabbitmq-web-stomp-examples/priv/sockjs-0.3.js +++ /dev/null @@ -1,2379 +0,0 @@ -/* SockJS client, version 0.3.4, http://sockjs.org, MIT License - -Copyright (c) 2011-2012 VMware, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -*/ - -// JSON2 by Douglas Crockford (minified). -var JSON;JSON||(JSON={}),function(){function str(a,b){var c,d,e,f,g=gap,h,i=b[a];i&&typeof i=="object"&&typeof i.toJSON=="function"&&(i=i.toJSON(a)),typeof rep=="function"&&(i=rep.call(b,a,i));switch(typeof i){case"string":return quote(i);case"number":return isFinite(i)?String(i):"null";case"boolean":case"null":return String(i);case"object":if(!i)return"null";gap+=indent,h=[];if(Object.prototype.toString.apply(i)==="[object Array]"){f=i.length;for(c=0;c 1) { - this._listeners[eventType] = arr.slice(0, idx).concat( arr.slice(idx+1) ); - } else { - delete this._listeners[eventType]; - } - return; - } - return; -}; - -REventTarget.prototype.dispatchEvent = function (event) { - var t = event.type; - var args = Array.prototype.slice.call(arguments, 0); - if (this['on'+t]) { - this['on'+t].apply(this, args); - } - if (this._listeners && t in this._listeners) { - for(var i=0; i < this._listeners[t].length; i++) { - this._listeners[t][i].apply(this, args); - } - } -}; -// [*] End of lib/reventtarget.js - - -// [*] Including lib/simpleevent.js -/* - * ***** BEGIN LICENSE BLOCK ***** - * Copyright (c) 2011-2012 VMware, Inc. - * - * For the license see COPYING. 
- * ***** END LICENSE BLOCK ***** - */ - -var SimpleEvent = function(type, obj) { - this.type = type; - if (typeof obj !== 'undefined') { - for(var k in obj) { - if (!obj.hasOwnProperty(k)) continue; - this[k] = obj[k]; - } - } -}; - -SimpleEvent.prototype.toString = function() { - var r = []; - for(var k in this) { - if (!this.hasOwnProperty(k)) continue; - var v = this[k]; - if (typeof v === 'function') v = '[function]'; - r.push(k + '=' + v); - } - return 'SimpleEvent(' + r.join(', ') + ')'; -}; -// [*] End of lib/simpleevent.js - - -// [*] Including lib/eventemitter.js -/* - * ***** BEGIN LICENSE BLOCK ***** - * Copyright (c) 2011-2012 VMware, Inc. - * - * For the license see COPYING. - * ***** END LICENSE BLOCK ***** - */ - -var EventEmitter = function(events) { - var that = this; - that._events = events || []; - that._listeners = {}; -}; -EventEmitter.prototype.emit = function(type) { - var that = this; - that._verifyType(type); - if (that._nuked) return; - - var args = Array.prototype.slice.call(arguments, 1); - if (that['on'+type]) { - that['on'+type].apply(that, args); - } - if (type in that._listeners) { - for(var i = 0; i < that._listeners[type].length; i++) { - that._listeners[type][i].apply(that, args); - } - } -}; - -EventEmitter.prototype.on = function(type, callback) { - var that = this; - that._verifyType(type); - if (that._nuked) return; - - if (!(type in that._listeners)) { - that._listeners[type] = []; - } - that._listeners[type].push(callback); -}; - -EventEmitter.prototype._verifyType = function(type) { - var that = this; - if (utils.arrIndexOf(that._events, type) === -1) { - utils.log('Event ' + JSON.stringify(type) + - ' not listed ' + JSON.stringify(that._events) + - ' in ' + that); - } -}; - -EventEmitter.prototype.nuke = function() { - var that = this; - that._nuked = true; - for(var i=0; i= 3000 && code <= 4999); -}; - -// See: http://www.erg.abdn.ac.uk/~gerrit/dccp/notes/ccid2/rto_estimator/ -// and RFC 2988. 
-utils.countRTO = function (rtt) { - var rto; - if (rtt > 100) { - rto = 3 * rtt; // rto > 300msec - } else { - rto = rtt + 200; // 200msec < rto <= 300msec - } - return rto; -} - -utils.log = function() { - if (_window.console && console.log && console.log.apply) { - console.log.apply(console, arguments); - } -}; - -utils.bind = function(fun, that) { - if (fun.bind) { - return fun.bind(that); - } else { - return function() { - return fun.apply(that, arguments); - }; - } -}; - -utils.flatUrl = function(url) { - return url.indexOf('?') === -1 && url.indexOf('#') === -1; -}; - -utils.amendUrl = function(url) { - var dl = _document.location; - if (!url) { - throw new Error('Wrong url for SockJS'); - } - if (!utils.flatUrl(url)) { - throw new Error('Only basic urls are supported in SockJS'); - } - - // '//abc' --> 'http://abc' - if (url.indexOf('//') === 0) { - url = dl.protocol + url; - } - // '/abc' --> 'http://localhost:80/abc' - if (url.indexOf('/') === 0) { - url = dl.protocol + '//' + dl.host + url; - } - // strip trailing slashes - url = url.replace(/[/]+$/,''); - return url; -}; - -// IE doesn't support [].indexOf. 
-utils.arrIndexOf = function(arr, obj){ - for(var i=0; i < arr.length; i++){ - if(arr[i] === obj){ - return i; - } - } - return -1; -}; - -utils.arrSkip = function(arr, obj) { - var idx = utils.arrIndexOf(arr, obj); - if (idx === -1) { - return arr.slice(); - } else { - var dst = arr.slice(0, idx); - return dst.concat(arr.slice(idx+1)); - } -}; - -// Via: https://gist.github.com/1133122/2121c601c5549155483f50be3da5305e83b8c5df -utils.isArray = Array.isArray || function(value) { - return {}.toString.call(value).indexOf('Array') >= 0 -}; - -utils.delay = function(t, fun) { - if(typeof t === 'function') { - fun = t; - t = 0; - } - return setTimeout(fun, t); -}; - - -// Chars worth escaping, as defined by Douglas Crockford: -// https://github.com/douglascrockford/JSON-js/blob/47a9882cddeb1e8529e07af9736218075372b8ac/json2.js#L196 -var json_escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - json_lookup = { -"\u0000":"\\u0000","\u0001":"\\u0001","\u0002":"\\u0002","\u0003":"\\u0003", -"\u0004":"\\u0004","\u0005":"\\u0005","\u0006":"\\u0006","\u0007":"\\u0007", -"\b":"\\b","\t":"\\t","\n":"\\n","\u000b":"\\u000b","\f":"\\f","\r":"\\r", -"\u000e":"\\u000e","\u000f":"\\u000f","\u0010":"\\u0010","\u0011":"\\u0011", -"\u0012":"\\u0012","\u0013":"\\u0013","\u0014":"\\u0014","\u0015":"\\u0015", -"\u0016":"\\u0016","\u0017":"\\u0017","\u0018":"\\u0018","\u0019":"\\u0019", -"\u001a":"\\u001a","\u001b":"\\u001b","\u001c":"\\u001c","\u001d":"\\u001d", -"\u001e":"\\u001e","\u001f":"\\u001f","\"":"\\\"","\\":"\\\\", -"\u007f":"\\u007f","\u0080":"\\u0080","\u0081":"\\u0081","\u0082":"\\u0082", -"\u0083":"\\u0083","\u0084":"\\u0084","\u0085":"\\u0085","\u0086":"\\u0086", -"\u0087":"\\u0087","\u0088":"\\u0088","\u0089":"\\u0089","\u008a":"\\u008a", -"\u008b":"\\u008b","\u008c":"\\u008c","\u008d":"\\u008d","\u008e":"\\u008e", -"\u008f":"\\u008f","\u0090":"\\u0090","\u0091":"\\u0091","\u0092":"\\u0092", 
-"\u0093":"\\u0093","\u0094":"\\u0094","\u0095":"\\u0095","\u0096":"\\u0096", -"\u0097":"\\u0097","\u0098":"\\u0098","\u0099":"\\u0099","\u009a":"\\u009a", -"\u009b":"\\u009b","\u009c":"\\u009c","\u009d":"\\u009d","\u009e":"\\u009e", -"\u009f":"\\u009f","\u00ad":"\\u00ad","\u0600":"\\u0600","\u0601":"\\u0601", -"\u0602":"\\u0602","\u0603":"\\u0603","\u0604":"\\u0604","\u070f":"\\u070f", -"\u17b4":"\\u17b4","\u17b5":"\\u17b5","\u200c":"\\u200c","\u200d":"\\u200d", -"\u200e":"\\u200e","\u200f":"\\u200f","\u2028":"\\u2028","\u2029":"\\u2029", -"\u202a":"\\u202a","\u202b":"\\u202b","\u202c":"\\u202c","\u202d":"\\u202d", -"\u202e":"\\u202e","\u202f":"\\u202f","\u2060":"\\u2060","\u2061":"\\u2061", -"\u2062":"\\u2062","\u2063":"\\u2063","\u2064":"\\u2064","\u2065":"\\u2065", -"\u2066":"\\u2066","\u2067":"\\u2067","\u2068":"\\u2068","\u2069":"\\u2069", -"\u206a":"\\u206a","\u206b":"\\u206b","\u206c":"\\u206c","\u206d":"\\u206d", -"\u206e":"\\u206e","\u206f":"\\u206f","\ufeff":"\\ufeff","\ufff0":"\\ufff0", -"\ufff1":"\\ufff1","\ufff2":"\\ufff2","\ufff3":"\\ufff3","\ufff4":"\\ufff4", -"\ufff5":"\\ufff5","\ufff6":"\\ufff6","\ufff7":"\\ufff7","\ufff8":"\\ufff8", -"\ufff9":"\\ufff9","\ufffa":"\\ufffa","\ufffb":"\\ufffb","\ufffc":"\\ufffc", -"\ufffd":"\\ufffd","\ufffe":"\\ufffe","\uffff":"\\uffff"}; - -// Some extra characters that Chrome gets wrong, and substitutes with -// something else on the wire. 
-var extra_escapable = /[\x00-\x1f\ud800-\udfff\ufffe\uffff\u0300-\u0333\u033d-\u0346\u034a-\u034c\u0350-\u0352\u0357-\u0358\u035c-\u0362\u0374\u037e\u0387\u0591-\u05af\u05c4\u0610-\u0617\u0653-\u0654\u0657-\u065b\u065d-\u065e\u06df-\u06e2\u06eb-\u06ec\u0730\u0732-\u0733\u0735-\u0736\u073a\u073d\u073f-\u0741\u0743\u0745\u0747\u07eb-\u07f1\u0951\u0958-\u095f\u09dc-\u09dd\u09df\u0a33\u0a36\u0a59-\u0a5b\u0a5e\u0b5c-\u0b5d\u0e38-\u0e39\u0f43\u0f4d\u0f52\u0f57\u0f5c\u0f69\u0f72-\u0f76\u0f78\u0f80-\u0f83\u0f93\u0f9d\u0fa2\u0fa7\u0fac\u0fb9\u1939-\u193a\u1a17\u1b6b\u1cda-\u1cdb\u1dc0-\u1dcf\u1dfc\u1dfe\u1f71\u1f73\u1f75\u1f77\u1f79\u1f7b\u1f7d\u1fbb\u1fbe\u1fc9\u1fcb\u1fd3\u1fdb\u1fe3\u1feb\u1fee-\u1fef\u1ff9\u1ffb\u1ffd\u2000-\u2001\u20d0-\u20d1\u20d4-\u20d7\u20e7-\u20e9\u2126\u212a-\u212b\u2329-\u232a\u2adc\u302b-\u302c\uaab2-\uaab3\uf900-\ufa0d\ufa10\ufa12\ufa15-\ufa1e\ufa20\ufa22\ufa25-\ufa26\ufa2a-\ufa2d\ufa30-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4e\ufff0-\uffff]/g, - extra_lookup; - -// JSON Quote string. Use native implementation when possible. -var JSONQuote = (JSON && JSON.stringify) || function(string) { - json_escapable.lastIndex = 0; - if (json_escapable.test(string)) { - string = string.replace(json_escapable, function(a) { - return json_lookup[a]; - }); - } - return '"' + string + '"'; -}; - -// This may be quite slow, so let's delay until user actually uses bad -// characters. -var unroll_lookup = function(escapable) { - var i; - var unrolled = {} - var c = [] - for(i=0; i<65536; i++) { - c.push( String.fromCharCode(i) ); - } - escapable.lastIndex = 0; - c.join('').replace(escapable, function (a) { - unrolled[ a ] = '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); - return ''; - }); - escapable.lastIndex = 0; - return unrolled; -}; - -// Quote string, also taking care of unicode characters that browsers -// often break. 
Especially, take care of unicode surrogates: -// http://en.wikipedia.org/wiki/Mapping_of_Unicode_characters#Surrogates -utils.quote = function(string) { - var quoted = JSONQuote(string); - - // In most cases this should be very fast and good enough. - extra_escapable.lastIndex = 0; - if(!extra_escapable.test(quoted)) { - return quoted; - } - - if(!extra_lookup) extra_lookup = unroll_lookup(extra_escapable); - - return quoted.replace(extra_escapable, function(a) { - return extra_lookup[a]; - }); -} - -var _all_protocols = ['websocket', - 'xdr-streaming', - 'xhr-streaming', - 'iframe-eventsource', - 'iframe-htmlfile', - 'xdr-polling', - 'xhr-polling', - 'iframe-xhr-polling', - 'jsonp-polling']; - -utils.probeProtocols = function() { - var probed = {}; - for(var i=0; i<_all_protocols.length; i++) { - var protocol = _all_protocols[i]; - // User can have a typo in protocol name. - probed[protocol] = SockJS[protocol] && - SockJS[protocol].enabled(); - } - return probed; -}; - -utils.detectProtocols = function(probed, protocols_whitelist, info) { - var pe = {}, - protocols = []; - if (!protocols_whitelist) protocols_whitelist = _all_protocols; - for(var i=0; i 0) { - maybe_push(protos); - } - } - } - - // 1. Websocket - if (info.websocket !== false) { - maybe_push(['websocket']); - } - - // 2. Streaming - if (pe['xhr-streaming'] && !info.null_origin) { - protocols.push('xhr-streaming'); - } else { - if (pe['xdr-streaming'] && !info.cookie_needed && !info.null_origin) { - protocols.push('xdr-streaming'); - } else { - maybe_push(['iframe-eventsource', - 'iframe-htmlfile']); - } - } - - // 3. 
Polling - if (pe['xhr-polling'] && !info.null_origin) { - protocols.push('xhr-polling'); - } else { - if (pe['xdr-polling'] && !info.cookie_needed && !info.null_origin) { - protocols.push('xdr-polling'); - } else { - maybe_push(['iframe-xhr-polling', - 'jsonp-polling']); - } - } - return protocols; -} -// [*] End of lib/utils.js - - -// [*] Including lib/dom.js -/* - * ***** BEGIN LICENSE BLOCK ***** - * Copyright (c) 2011-2012 VMware, Inc. - * - * For the license see COPYING. - * ***** END LICENSE BLOCK ***** - */ - -// May be used by htmlfile jsonp and transports. -var MPrefix = '_sockjs_global'; -utils.createHook = function() { - var window_id = 'a' + utils.random_string(8); - if (!(MPrefix in _window)) { - var map = {}; - _window[MPrefix] = function(window_id) { - if (!(window_id in map)) { - map[window_id] = { - id: window_id, - del: function() {delete map[window_id];} - }; - } - return map[window_id]; - } - } - return _window[MPrefix](window_id); -}; - - - -utils.attachMessage = function(listener) { - utils.attachEvent('message', listener); -}; -utils.attachEvent = function(event, listener) { - if (typeof _window.addEventListener !== 'undefined') { - _window.addEventListener(event, listener, false); - } else { - // IE quirks. - // According to: http://stevesouders.com/misc/test-postmessage.php - // the message gets delivered only to 'document', not 'window'. - _document.attachEvent("on" + event, listener); - // I get 'window' for ie8. - _window.attachEvent("on" + event, listener); - } -}; - -utils.detachMessage = function(listener) { - utils.detachEvent('message', listener); -}; -utils.detachEvent = function(event, listener) { - if (typeof _window.addEventListener !== 'undefined') { - _window.removeEventListener(event, listener, false); - } else { - _document.detachEvent("on" + event, listener); - _window.detachEvent("on" + event, listener); - } -}; - - -var on_unload = {}; -// Things registered after beforeunload are to be called immediately. 
-var after_unload = false; - -var trigger_unload_callbacks = function() { - for(var ref in on_unload) { - on_unload[ref](); - delete on_unload[ref]; - }; -}; - -var unload_triggered = function() { - if(after_unload) return; - after_unload = true; - trigger_unload_callbacks(); -}; - -// 'unload' alone is not reliable in opera within an iframe, but we -// can't use `beforeunload` as IE fires it on javascript: links. -utils.attachEvent('unload', unload_triggered); - -utils.unload_add = function(listener) { - var ref = utils.random_string(8); - on_unload[ref] = listener; - if (after_unload) { - utils.delay(trigger_unload_callbacks); - } - return ref; -}; -utils.unload_del = function(ref) { - if (ref in on_unload) - delete on_unload[ref]; -}; - - -utils.createIframe = function (iframe_url, error_callback) { - var iframe = _document.createElement('iframe'); - var tref, unload_ref; - var unattach = function() { - clearTimeout(tref); - // Explorer had problems with that. - try {iframe.onload = null;} catch (x) {} - iframe.onerror = null; - }; - var cleanup = function() { - if (iframe) { - unattach(); - // This timeout makes chrome fire onbeforeunload event - // within iframe. Without the timeout it goes straight to - // onunload. - setTimeout(function() { - if(iframe) { - iframe.parentNode.removeChild(iframe); - } - iframe = null; - }, 0); - utils.unload_del(unload_ref); - } - }; - var onerror = function(r) { - if (iframe) { - cleanup(); - error_callback(r); - } - }; - var post = function(msg, origin) { - try { - // When the iframe is not loaded, IE raises an exception - // on 'contentWindow'. - if (iframe && iframe.contentWindow) { - iframe.contentWindow.postMessage(msg, origin); - } - } catch (x) {}; - }; - - iframe.src = iframe_url; - iframe.style.display = 'none'; - iframe.style.position = 'absolute'; - iframe.onerror = function(){onerror('onerror');}; - iframe.onload = function() { - // `onload` is triggered before scripts on the iframe are - // executed. 
Give it few seconds to actually load stuff. - clearTimeout(tref); - tref = setTimeout(function(){onerror('onload timeout');}, 2000); - }; - _document.body.appendChild(iframe); - tref = setTimeout(function(){onerror('timeout');}, 15000); - unload_ref = utils.unload_add(cleanup); - return { - post: post, - cleanup: cleanup, - loaded: unattach - }; -}; - -utils.createHtmlfile = function (iframe_url, error_callback) { - var doc = new ActiveXObject('htmlfile'); - var tref, unload_ref; - var iframe; - var unattach = function() { - clearTimeout(tref); - }; - var cleanup = function() { - if (doc) { - unattach(); - utils.unload_del(unload_ref); - iframe.parentNode.removeChild(iframe); - iframe = doc = null; - CollectGarbage(); - } - }; - var onerror = function(r) { - if (doc) { - cleanup(); - error_callback(r); - } - }; - var post = function(msg, origin) { - try { - // When the iframe is not loaded, IE raises an exception - // on 'contentWindow'. - if (iframe && iframe.contentWindow) { - iframe.contentWindow.postMessage(msg, origin); - } - } catch (x) {}; - }; - - doc.open(); - doc.write('' + - 'document.domain="' + document.domain + '";' + - ''); - doc.close(); - doc.parentWindow[WPrefix] = _window[WPrefix]; - var c = doc.createElement('div'); - doc.body.appendChild(c); - iframe = doc.createElement('iframe'); - c.appendChild(iframe); - iframe.src = iframe_url; - tref = setTimeout(function(){onerror('timeout');}, 15000); - unload_ref = utils.unload_add(cleanup); - return { - post: post, - cleanup: cleanup, - loaded: unattach - }; -}; -// [*] End of lib/dom.js - - -// [*] Including lib/dom2.js -/* - * ***** BEGIN LICENSE BLOCK ***** - * Copyright (c) 2011-2012 VMware, Inc. - * - * For the license see COPYING. 
- * ***** END LICENSE BLOCK ***** - */ - -var AbstractXHRObject = function(){}; -AbstractXHRObject.prototype = new EventEmitter(['chunk', 'finish']); - -AbstractXHRObject.prototype._start = function(method, url, payload, opts) { - var that = this; - - try { - that.xhr = new XMLHttpRequest(); - } catch(x) {}; - - if (!that.xhr) { - try { - that.xhr = new _window.ActiveXObject('Microsoft.XMLHTTP'); - } catch(x) {}; - } - if (_window.ActiveXObject || _window.XDomainRequest) { - // IE8 caches even POSTs - url += ((url.indexOf('?') === -1) ? '?' : '&') + 't='+(+new Date); - } - - // Explorer tends to keep connection open, even after the - // tab gets closed: http://bugs.jquery.com/ticket/5280 - that.unload_ref = utils.unload_add(function(){that._cleanup(true);}); - try { - that.xhr.open(method, url, true); - } catch(e) { - // IE raises an exception on wrong port. - that.emit('finish', 0, ''); - that._cleanup(); - return; - }; - - if (!opts || !opts.no_credentials) { - // Mozilla docs says https://developer.mozilla.org/en/XMLHttpRequest : - // "This never affects same-site requests." - that.xhr.withCredentials = 'true'; - } - if (opts && opts.headers) { - for(var key in opts.headers) { - that.xhr.setRequestHeader(key, opts.headers[key]); - } - } - - that.xhr.onreadystatechange = function() { - if (that.xhr) { - var x = that.xhr; - switch (x.readyState) { - case 3: - // IE doesn't like peeking into responseText or status - // on Microsoft.XMLHTTP and readystate=3 - try { - var status = x.status; - var text = x.responseText; - } catch (x) {}; - // IE returns 1223 for 204: http://bugs.jquery.com/ticket/1450 - if (status === 1223) status = 204; - - // IE does return readystate == 3 for 404 answers. 
- if (text && text.length > 0) { - that.emit('chunk', status, text); - } - break; - case 4: - var status = x.status; - // IE returns 1223 for 204: http://bugs.jquery.com/ticket/1450 - if (status === 1223) status = 204; - - that.emit('finish', status, x.responseText); - that._cleanup(false); - break; - } - } - }; - that.xhr.send(payload); -}; - -AbstractXHRObject.prototype._cleanup = function(abort) { - var that = this; - if (!that.xhr) return; - utils.unload_del(that.unload_ref); - - // IE needs this field to be a function - that.xhr.onreadystatechange = function(){}; - - if (abort) { - try { - that.xhr.abort(); - } catch(x) {}; - } - that.unload_ref = that.xhr = null; -}; - -AbstractXHRObject.prototype.close = function() { - var that = this; - that.nuke(); - that._cleanup(true); -}; - -var XHRCorsObject = utils.XHRCorsObject = function() { - var that = this, args = arguments; - utils.delay(function(){that._start.apply(that, args);}); -}; -XHRCorsObject.prototype = new AbstractXHRObject(); - -var XHRLocalObject = utils.XHRLocalObject = function(method, url, payload) { - var that = this; - utils.delay(function(){ - that._start(method, url, payload, { - no_credentials: true - }); - }); -}; -XHRLocalObject.prototype = new AbstractXHRObject(); - - - -// References: -// http://ajaxian.com/archives/100-line-ajax-wrapper -// http://msdn.microsoft.com/en-us/library/cc288060(v=VS.85).aspx -var XDRObject = utils.XDRObject = function(method, url, payload) { - var that = this; - utils.delay(function(){that._start(method, url, payload);}); -}; -XDRObject.prototype = new EventEmitter(['chunk', 'finish']); -XDRObject.prototype._start = function(method, url, payload) { - var that = this; - var xdr = new XDomainRequest(); - // IE caches even POSTs - url += ((url.indexOf('?') === -1) ? '?' 
: '&') + 't='+(+new Date); - - var onerror = xdr.ontimeout = xdr.onerror = function() { - that.emit('finish', 0, ''); - that._cleanup(false); - }; - xdr.onprogress = function() { - that.emit('chunk', 200, xdr.responseText); - }; - xdr.onload = function() { - that.emit('finish', 200, xdr.responseText); - that._cleanup(false); - }; - that.xdr = xdr; - that.unload_ref = utils.unload_add(function(){that._cleanup(true);}); - try { - // Fails with AccessDenied if port number is bogus - that.xdr.open(method, url); - that.xdr.send(payload); - } catch(x) { - onerror(); - } -}; - -XDRObject.prototype._cleanup = function(abort) { - var that = this; - if (!that.xdr) return; - utils.unload_del(that.unload_ref); - - that.xdr.ontimeout = that.xdr.onerror = that.xdr.onprogress = - that.xdr.onload = null; - if (abort) { - try { - that.xdr.abort(); - } catch(x) {}; - } - that.unload_ref = that.xdr = null; -}; - -XDRObject.prototype.close = function() { - var that = this; - that.nuke(); - that._cleanup(true); -}; - -// 1. Is natively via XHR -// 2. Is natively via XDR -// 3. Nope, but postMessage is there so it should work via the Iframe. -// 4. Nope, sorry. -utils.isXHRCorsCapable = function() { - if (_window.XMLHttpRequest && 'withCredentials' in new XMLHttpRequest()) { - return 1; - } - // XDomainRequest doesn't work if page is served from file:// - if (_window.XDomainRequest && _document.domain) { - return 2; - } - if (IframeTransport.enabled()) { - return 3; - } - return 4; -}; -// [*] End of lib/dom2.js - - -// [*] Including lib/sockjs.js -/* - * ***** BEGIN LICENSE BLOCK ***** - * Copyright (c) 2011-2012 VMware, Inc. - * - * For the license see COPYING. 
- * ***** END LICENSE BLOCK ***** - */ - -var SockJS = function(url, dep_protocols_whitelist, options) { - if (this === _window) { - // makes `new` optional - return new SockJS(url, dep_protocols_whitelist, options); - } - - var that = this, protocols_whitelist; - that._options = {devel: false, debug: false, protocols_whitelist: [], - info: undefined, rtt: undefined}; - if (options) { - utils.objectExtend(that._options, options); - } - that._base_url = utils.amendUrl(url); - that._server = that._options.server || utils.random_number_string(1000); - if (that._options.protocols_whitelist && - that._options.protocols_whitelist.length) { - protocols_whitelist = that._options.protocols_whitelist; - } else { - // Deprecated API - if (typeof dep_protocols_whitelist === 'string' && - dep_protocols_whitelist.length > 0) { - protocols_whitelist = [dep_protocols_whitelist]; - } else if (utils.isArray(dep_protocols_whitelist)) { - protocols_whitelist = dep_protocols_whitelist - } else { - protocols_whitelist = null; - } - if (protocols_whitelist) { - that._debug('Deprecated API: Use "protocols_whitelist" option ' + - 'instead of supplying protocol list as a second ' + - 'parameter to SockJS constructor.'); - } - } - that._protocols = []; - that.protocol = null; - that.readyState = SockJS.CONNECTING; - that._ir = createInfoReceiver(that._base_url); - that._ir.onfinish = function(info, rtt) { - that._ir = null; - if (info) { - if (that._options.info) { - // Override if user supplies the option - info = utils.objectExtend(info, that._options.info); - } - if (that._options.rtt) { - rtt = that._options.rtt; - } - that._applyInfo(info, rtt, protocols_whitelist); - that._didClose(); - } else { - that._didClose(1002, 'Can\'t connect to server', true); - } - }; -}; -// Inheritance -SockJS.prototype = new REventTarget(); - -SockJS.version = "0.3.4"; - -SockJS.CONNECTING = 0; -SockJS.OPEN = 1; -SockJS.CLOSING = 2; -SockJS.CLOSED = 3; - -SockJS.prototype._debug = function() { - if 
(this._options.debug) - utils.log.apply(utils, arguments); -}; - -SockJS.prototype._dispatchOpen = function() { - var that = this; - if (that.readyState === SockJS.CONNECTING) { - if (that._transport_tref) { - clearTimeout(that._transport_tref); - that._transport_tref = null; - } - that.readyState = SockJS.OPEN; - that.dispatchEvent(new SimpleEvent("open")); - } else { - // The server might have been restarted, and lost track of our - // connection. - that._didClose(1006, "Server lost session"); - } -}; - -SockJS.prototype._dispatchMessage = function(data) { - var that = this; - if (that.readyState !== SockJS.OPEN) - return; - that.dispatchEvent(new SimpleEvent("message", {data: data})); -}; - -SockJS.prototype._dispatchHeartbeat = function(data) { - var that = this; - if (that.readyState !== SockJS.OPEN) - return; - that.dispatchEvent(new SimpleEvent('heartbeat', {})); -}; - -SockJS.prototype._didClose = function(code, reason, force) { - var that = this; - if (that.readyState !== SockJS.CONNECTING && - that.readyState !== SockJS.OPEN && - that.readyState !== SockJS.CLOSING) - throw new Error('INVALID_STATE_ERR'); - if (that._ir) { - that._ir.nuke(); - that._ir = null; - } - - if (that._transport) { - that._transport.doCleanup(); - that._transport = null; - } - - var close_event = new SimpleEvent("close", { - code: code, - reason: reason, - wasClean: utils.userSetCode(code)}); - - if (!utils.userSetCode(code) && - that.readyState === SockJS.CONNECTING && !force) { - if (that._try_next_protocol(close_event)) { - return; - } - close_event = new SimpleEvent("close", {code: 2000, - reason: "All transports failed", - wasClean: false, - last_event: close_event}); - } - that.readyState = SockJS.CLOSED; - - utils.delay(function() { - that.dispatchEvent(close_event); - }); -}; - -SockJS.prototype._didMessage = function(data) { - var that = this; - var type = data.slice(0, 1); - switch(type) { - case 'o': - that._dispatchOpen(); - break; - case 'a': - var payload = 
JSON.parse(data.slice(1) || '[]'); - for(var i=0; i < payload.length; i++){ - that._dispatchMessage(payload[i]); - } - break; - case 'm': - var payload = JSON.parse(data.slice(1) || 'null'); - that._dispatchMessage(payload); - break; - case 'c': - var payload = JSON.parse(data.slice(1) || '[]'); - that._didClose(payload[0], payload[1]); - break; - case 'h': - that._dispatchHeartbeat(); - break; - } -}; - -SockJS.prototype._try_next_protocol = function(close_event) { - var that = this; - if (that.protocol) { - that._debug('Closed transport:', that.protocol, ''+close_event); - that.protocol = null; - } - if (that._transport_tref) { - clearTimeout(that._transport_tref); - that._transport_tref = null; - } - - while(1) { - var protocol = that.protocol = that._protocols.shift(); - if (!protocol) { - return false; - } - // Some protocols require access to `body`, what if were in - // the `head`? - if (SockJS[protocol] && - SockJS[protocol].need_body === true && - (!_document.body || - (typeof _document.readyState !== 'undefined' - && _document.readyState !== 'complete'))) { - that._protocols.unshift(protocol); - that.protocol = 'waiting-for-load'; - utils.attachEvent('load', function(){ - that._try_next_protocol(); - }); - return true; - } - - if (!SockJS[protocol] || - !SockJS[protocol].enabled(that._options)) { - that._debug('Skipping transport:', protocol); - } else { - var roundTrips = SockJS[protocol].roundTrips || 1; - var to = ((that._options.rto || 0) * roundTrips) || 5000; - that._transport_tref = utils.delay(to, function() { - if (that.readyState === SockJS.CONNECTING) { - // I can't understand how it is possible to run - // this timer, when the state is CLOSED, but - // apparently in IE everythin is possible. 
- that._didClose(2007, "Transport timeouted"); - } - }); - - var connid = utils.random_string(8); - var trans_url = that._base_url + '/' + that._server + '/' + connid; - that._debug('Opening transport:', protocol, ' url:'+trans_url, - ' RTO:'+that._options.rto); - that._transport = new SockJS[protocol](that, trans_url, - that._base_url); - return true; - } - } -}; - -SockJS.prototype.close = function(code, reason) { - var that = this; - if (code && !utils.userSetCode(code)) - throw new Error("INVALID_ACCESS_ERR"); - if(that.readyState !== SockJS.CONNECTING && - that.readyState !== SockJS.OPEN) { - return false; - } - that.readyState = SockJS.CLOSING; - that._didClose(code || 1000, reason || "Normal closure"); - return true; -}; - -SockJS.prototype.send = function(data) { - var that = this; - if (that.readyState === SockJS.CONNECTING) - throw new Error('INVALID_STATE_ERR'); - if (that.readyState === SockJS.OPEN) { - that._transport.doSend(utils.quote('' + data)); - } - return true; -}; - -SockJS.prototype._applyInfo = function(info, rtt, protocols_whitelist) { - var that = this; - that._options.info = info; - that._options.rtt = rtt; - that._options.rto = utils.countRTO(rtt); - that._options.info.null_origin = !_document.domain; - var probed = utils.probeProtocols(); - that._protocols = utils.detectProtocols(probed, protocols_whitelist, info); -}; -// [*] End of lib/sockjs.js - - -// [*] Including lib/trans-websocket.js -/* - * ***** BEGIN LICENSE BLOCK ***** - * Copyright (c) 2011-2012 VMware, Inc. - * - * For the license see COPYING. 
- * ***** END LICENSE BLOCK ***** - */ - -var WebSocketTransport = SockJS.websocket = function(ri, trans_url) { - var that = this; - var url = trans_url + '/websocket'; - if (url.slice(0, 5) === 'https') { - url = 'wss' + url.slice(5); - } else { - url = 'ws' + url.slice(4); - } - that.ri = ri; - that.url = url; - var Constructor = _window.WebSocket || _window.MozWebSocket; - - that.ws = new Constructor(that.url); - that.ws.onmessage = function(e) { - that.ri._didMessage(e.data); - }; - // Firefox has an interesting bug. If a websocket connection is - // created after onunload, it stays alive even when user - // navigates away from the page. In such situation let's lie - - // let's not open the ws connection at all. See: - // https://github.com/sockjs/sockjs-client/issues/28 - // https://bugzilla.mozilla.org/show_bug.cgi?id=696085 - that.unload_ref = utils.unload_add(function(){that.ws.close()}); - that.ws.onclose = function() { - that.ri._didMessage(utils.closeFrame(1006, "WebSocket connection broken")); - }; -}; - -WebSocketTransport.prototype.doSend = function(data) { - this.ws.send('[' + data + ']'); -}; - -WebSocketTransport.prototype.doCleanup = function() { - var that = this; - var ws = that.ws; - if (ws) { - ws.onmessage = ws.onclose = null; - ws.close(); - utils.unload_del(that.unload_ref); - that.unload_ref = that.ri = that.ws = null; - } -}; - -WebSocketTransport.enabled = function() { - return !!(_window.WebSocket || _window.MozWebSocket); -}; - -// In theory, ws should require 1 round trip. But in chrome, this is -// not very stable over SSL. Most likely a ws connection requires a -// separate SSL connection, in which case 2 round trips are an -// absolute minumum. -WebSocketTransport.roundTrips = 2; -// [*] End of lib/trans-websocket.js - - -// [*] Including lib/trans-sender.js -/* - * ***** BEGIN LICENSE BLOCK ***** - * Copyright (c) 2011-2012 VMware, Inc. - * - * For the license see COPYING. 
- * ***** END LICENSE BLOCK ***** - */ - -var BufferedSender = function() {}; -BufferedSender.prototype.send_constructor = function(sender) { - var that = this; - that.send_buffer = []; - that.sender = sender; -}; -BufferedSender.prototype.doSend = function(message) { - var that = this; - that.send_buffer.push(message); - if (!that.send_stop) { - that.send_schedule(); - } -}; - -// For polling transports in a situation when in the message callback, -// new message is being send. If the sending connection was started -// before receiving one, it is possible to saturate the network and -// timeout due to the lack of receiving socket. To avoid that we delay -// sending messages by some small time, in order to let receiving -// connection be started beforehand. This is only a halfmeasure and -// does not fix the big problem, but it does make the tests go more -// stable on slow networks. -BufferedSender.prototype.send_schedule_wait = function() { - var that = this; - var tref; - that.send_stop = function() { - that.send_stop = null; - clearTimeout(tref); - }; - tref = utils.delay(25, function() { - that.send_stop = null; - that.send_schedule(); - }); -}; - -BufferedSender.prototype.send_schedule = function() { - var that = this; - if (that.send_buffer.length > 0) { - var payload = '[' + that.send_buffer.join(',') + ']'; - that.send_stop = that.sender(that.trans_url, payload, function(success, abort_reason) { - that.send_stop = null; - if (success === false) { - that.ri._didClose(1006, 'Sending error ' + abort_reason); - } else { - that.send_schedule_wait(); - } - }); - that.send_buffer = []; - } -}; - -BufferedSender.prototype.send_destructor = function() { - var that = this; - if (that._send_stop) { - that._send_stop(); - } - that._send_stop = null; -}; - -var jsonPGenericSender = function(url, payload, callback) { - var that = this; - - if (!('_send_form' in that)) { - var form = that._send_form = _document.createElement('form'); - var area = that._send_area = 
_document.createElement('textarea'); - area.name = 'd'; - form.style.display = 'none'; - form.style.position = 'absolute'; - form.method = 'POST'; - form.enctype = 'application/x-www-form-urlencoded'; - form.acceptCharset = "UTF-8"; - form.appendChild(area); - _document.body.appendChild(form); - } - var form = that._send_form; - var area = that._send_area; - var id = 'a' + utils.random_string(8); - form.target = id; - form.action = url + '/jsonp_send?i=' + id; - - var iframe; - try { - // ie6 dynamic iframes with target="" support (thanks Chris Lambacher) - iframe = _document.createElement('